import gradio as gr
from transformers import TextClassificationPipeline, AutoTokenizer, AutoModelForSequenceClassification

# Load a pre-trained text-moderation model and its tokenizer
model_name = "KoalaAI/Text-Moderation"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)

# Create a TextClassificationPipeline from the model and tokenizer
pipe = TextClassificationPipeline(model=model, tokenizer=tokenizer)

# Classify the input text and return the top predicted label
def classify_text(text):
    prediction = pipe(text)[0]["label"]
    return prediction

# Create a Gradio interface
iface = gr.Interface(
    fn=classify_text,
    inputs=gr.Textbox(label="Enter text"),
    outputs=gr.Label(label="Predicted class"),
)

# Launch the Gradio app
iface.launch()
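
If you want the app to display a confidence score for every moderation label rather than only the top one, gr.Label also accepts a dictionary mapping labels to scores. Below is a minimal sketch of such a variant; it assumes the pipeline's top_k=None option, which makes the text-classification pipeline return scores for all classes, and uses a hypothetical function name classify_text_with_scores:

# Variant: return a {label: score} dict so gr.Label renders a confidence bar per class
def classify_text_with_scores(text):
    # top_k=None returns a list of {"label": ..., "score": ...} entries for every class
    results = pipe(text, top_k=None)
    return {r["label"]: float(r["score"]) for r in results}

iface = gr.Interface(
    fn=classify_text_with_scores,
    inputs=gr.Textbox(label="Enter text"),
    outputs=gr.Label(label="Predicted classes"),
)
iface.launch()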