import os

import gradio as gr
import requests

# Hugging Face Inference API endpoints
CONTEXT_API_URL = "https://api-inference.huggingface.co/models/tiiuae/falcon-7b-instruct"
QA_API_URL = "https://api-inference.huggingface.co/models/valhalla/longformer-base-4096-finetuned-squadv1"

# Read the API token from the environment instead of hard-coding a secret in the source
HEADERS = {"Authorization": f"Bearer {os.environ.get('HF_API_TOKEN', '')}"}


def query(api_url, payload):
    """POST a JSON payload to a Hugging Face Inference API endpoint."""
    response = requests.post(api_url, headers=HEADERS, json=payload)
    response.raise_for_status()
    return response.json()


def get_context_func(question):
    """Generate a context passage for the question with Falcon-7B-Instruct."""
    # Text-generation endpoints expect {"inputs": ...} and return [{"generated_text": ...}]
    result = query(CONTEXT_API_URL, {"inputs": question})
    return result[0]["generated_text"]


def ask_ai(question, context):
    """Answer the question from the given context with the Longformer QA model."""
    # Question-answering endpoints expect {"inputs": {"question": ..., "context": ...}}
    result = query(QA_API_URL, {"inputs": {"question": question, "context": context}})
    return result["answer"]


# gr.Interface cannot take a Button as an input, so use gr.Blocks to wire up
# a separate "Get context" step before the question is answered.
with gr.Blocks() as demo:
    question_input = gr.Textbox(label="Question")
    context_input = gr.Textbox(label="Context")
    answer_output = gr.Textbox(label="Answer")

    get_context_btn = gr.Button("Get context")
    ask_btn = gr.Button("Ask")

    get_context_btn.click(get_context_func, inputs=question_input, outputs=context_input)
    ask_btn.click(ask_ai, inputs=[question_input, context_input], outputs=answer_output)

demo.launch()