#import streamlit as st
import gradio as gr
from transformers import pipeline
from huggingface_hub import InferenceClient
#import gc
#st.header("Sentiment-demo-app")
#st.subheader("Please be patient and wait up to a minute until the demo app is loaded.")
#st.caption("This is a very simple demo application for a zero-shot classification pipeline to classify positive, neutral, or negative sentiment for a short text. Enter your text in the box below and press Ctrl+Enter to run the model.")
title = "Sentiment-demo-app"
description = """This is a very simple demo application for a sentiment classification pipeline that classifies the sentiment of a short text as positive, neutral, or negative. Enter your text in the box below and press Enter (or click Submit) to run the model.
Please be patient until the demo app has loaded. """
# Multilingual sentiment pipeline; an earlier revision used pipeline("zero-shot-classification", model='facebook/bart-large-mnli')
sentiment = pipeline("text-classification", model='tabularisai/multilingual-sentiment-analysis')
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")  # not used by the interface below
def get_sentiment(text):
    """Run the sentiment pipeline on `text` and return a human-readable result."""
    output = sentiment(text)
    return f'The sentence was classified as "{output[0]["label"]}" with {output[0]["score"]*100:.1f}% confidence'
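# Example (a sketch; the exact label strings depend on the model's label mapping):
#   get_sentiment("I love this!")
#   -> 'The sentence was classified as "Very Positive" with 98.7% confidence'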
# Wire the classifier into a simple Gradio UI: one text input, one text output
demo = gr.Interface(
    fn=get_sentiment,
    inputs="text",
    outputs="text",
    title=title,
    description=description,
)
if __name__ == "__main__":
    demo.launch()
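# On a Hugging Face Space using the Gradio SDK, this file is imported and the `demo`
# object is served automatically; running `python app.py` locally starts the server too.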
#texts = st.text_area('Enter text here!')
#candidate_labels = ['Positive', 'Neutral', 'Negative']
#result = pipe(texts)
#if text:
# out = pipe(text, result)
# st.json(out)
# del out
#    gc.collect()