Spaces: Runtime error
# https://huggingface.co/transformers/main_classes/pipelines.html
# https://huggingface.co/models?filter=conversational
# Install dependencies (use the Conda "qna" environment, then you're all set):
# !pip install transformers
# !pip install ipywidgets
# !pip install gradio  # see setup for installing gradio
# Import dependencies
from transformers import pipeline
import gradio as gr

# Build the extractive question-answering pipeline once at module load.
# Other models previously tried (note: the bert-large ids must NOT carry a
# trailing space inside the quotes, as an earlier commented version did):
#   bert-large-uncased-whole-word-masking-finetuned-squad
#   distilbert-base-cased-distilled-squad
#   distilbert-base-uncased-distilled-squad
nlp = pipeline(
    'question-answering',
    model='deepset/roberta-base-squad2',
    tokenizer='deepset/roberta-base-squad2',
)
def question_answer(context_filename, question):
    """Answer *question* using the text in *context_filename* as context.

    Args:
        context_filename: Path to a UTF-8 text file whose contents become
            the QA context.
        question: Natural-language question to answer from that context.

    Returns:
        The answer span (str) extracted by the module-level ``nlp``
        question-answering pipeline.
    """
    # Explicit encoding: the default is platform-dependent and breaks on
    # Windows for UTF-8 context files.
    with open(context_filename, encoding='utf-8') as f:
        context = f.read()
    result = nlp({'question': question, 'context': context})
    return result['answer']
# Wire the QA function into a simple web UI: pick a context file from the
# dropdown, type a question, read the extracted answer.
demo = gr.Interface(
    fn=question_answer,
    inputs=[
        gr.Dropdown(
            [
                'spiderman.txt',
                'world-john.txt',
                'world-romans.txt',
                'world-nt.txt',
                'world-ot.txt',
            ],  # 'lotr01.txt' intentionally excluded
            label='Context file',
        ),
        # Modern gradio-3+ component; the old gr.inputs.Textbox API was
        # removed and raises at runtime on current gradio.
        gr.Textbox(lines=2, placeholder='Enter your question',
                   label='Question'),
    ],
    outputs='textbox',
)

demo.launch(share=False)