```python
import gradio as gr
from transformers import AutoTokenizer, AutoModelForTokenClassification
from pipeline import DiscoursePipeline  # your custom code

model_id = "poyum/test_discut"
tokenizer = AutoTokenizer.from_pretrained(model_id)
pipe = DiscoursePipeline(model_id=model_id, tokenizer=tokenizer)


def predict(text):
    return pipe(text)


demo = gr.Interface(fn=predict, inputs="text", outputs="text")
demo.launch()
```
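The app expects a `pipeline.py` next to `app.py` that exposes a `DiscoursePipeline` class accepting `model_id` and `tokenizer` and callable on a string. The source does not show that file, so the snippet below is only a minimal sketch of what such a class might look like, assuming it runs the token-classification model loaded from `model_id` and returns a plain-text rendering of the predicted labels; the actual custom pipeline may differ.

```python
# Hypothetical sketch of pipeline.py; the real DiscoursePipeline is custom code not shown in the source.
import torch
from transformers import AutoModelForTokenClassification


class DiscoursePipeline:
    """Wraps a token-classification model and formats its predictions as text."""

    def __init__(self, model_id, tokenizer):
        self.tokenizer = tokenizer
        self.model = AutoModelForTokenClassification.from_pretrained(model_id)
        self.model.eval()

    def __call__(self, text):
        # Tokenize the input and run the model without gradient tracking.
        inputs = self.tokenizer(text, return_tensors="pt", truncation=True)
        with torch.no_grad():
            logits = self.model(**inputs).logits
        # Pick the highest-scoring label for each token.
        predictions = logits.argmax(dim=-1)[0].tolist()
        tokens = self.tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
        labels = [self.model.config.id2label[p] for p in predictions]
        # Return plain text so it displays directly in the Gradio output textbox.
        return "\n".join(f"{tok}\t{lab}" for tok, lab in zip(tokens, labels))
```

With a class shaped like this, `pipe(text)` in `predict` returns a string, which matches the `outputs="text"` component used by `gr.Interface`.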