import gradio as gr
from transformers import pipeline
from huggingface_hub import InferenceClient

title = "Sentiment-demo-app"
description = """This is a very simple demo application for a sentiment classification pipeline that classifies
a short text as positive, neutral, or negative. Enter your text in the box below and press Ctrl+Enter
(or click Submit) to run the model. Please be patient until the demo app has loaded.
"""

# Multilingual sentiment classifier; an earlier version used a zero-shot-classification
# pipeline with the facebook/bart-large-mnli model.
sentiment = pipeline("text-classification", model="tabularisai/multilingual-sentiment-analysis")

# Inference client for a hosted LLM; currently not used by the demo.
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")


def get_sentiment(text):
    """Classify the sentiment of `text` and return a human-readable result string."""
    output = sentiment(text)
    label = output[0]["label"]
    score = output[0]["score"] * 100
    return f'The sentence was classified as "{label}" with {score:.1f}% confidence.'


demo = gr.Interface(
    fn=get_sentiment,
    inputs="text",
    outputs="text",
    title=title,
    description=description,
)

if __name__ == "__main__":
    demo.launch()
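
# Usage sketch (assumption, not part of the app): once `demo.launch()` is running,
# the Gradio endpoint can also be queried programmatically with the `gradio_client`
# package. The local URL and port below are the Gradio defaults and may differ
# for your deployment; "/predict" is the default endpoint name for a gr.Interface.
#
#   from gradio_client import Client
#
#   gradio_demo = Client("http://127.0.0.1:7860")
#   print(gradio_demo.predict("I really enjoyed this movie!", api_name="/predict"))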