Runtime error
Update app.py
app.py CHANGED
@@ -7,7 +7,7 @@ headers = {"Authorization": "Bearer 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"}
 def query(payload):
     response = requests.post(API_URL, headers=headers, json=payload)
     return response.json()
-
+
 API_URL2 = "https://api-inference.huggingface.co/models/valhalla/longformer-base-4096-finetuned-squadv1"
 headers2 = {"Authorization": "Bearer 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"}
 
@@ -22,16 +22,9 @@ class State:
 
 state = State()
 
-def
-state.question =
-
-        "inputs": f"context for '{state.question}' is:",
-    })
-    detect_context_from_question.interface.inputs[1].value = output  # Update the value of the context Textbox
-
-def ask_question():
-    state.question = ask_question.interface.inputs[0].value
-    state.context = ask_question.interface.inputs[1].value
+def ask_question(question, context):
+    state.question = question
+    state.context = context
     output2 = query2({
         "inputs": {
             "question": state.question,
@@ -40,17 +33,6 @@ def ask_question():
     })
     ask_question.interface.outputs[0].value = output2  # Update the value of the answer Textbox
 
-iface_detect = gr.Interface(
-    fn=detect_context_from_question,
-    inputs=[
-        gr.Textbox(type="text", placeholder="Enter your question"),
-        gr.Textbox(type="text", placeholder="Enter context"),
-        gr.Button("Detect Context")
-    ],
-    outputs=None,
-    live=True
-)
-
 iface_ask = gr.Interface(
     fn=ask_question,
     inputs=[
@@ -62,5 +44,5 @@ iface_ask = gr.Interface(
     live=True
 )
 
-iface_detect.launch()
 iface_ask.launch()
+
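For reference, a minimal sketch of what app.py plausibly looks like after this commit, assembled from the hunks above. Everything the diff does not show is an assumption: the imports, the body of class State, and a query2 helper are inferred to mirror the visible fragments; the access token is replaced with a placeholder; and the first model's query() helper, which nothing calls once detect_context_from_question is removed, is omitted. The sketch also deviates from the committed code in one place: a Gradio Interface passes the Textbox values to fn as arguments and displays the returned value, so the handler returns the answer instead of assigning to ask_question.interface.outputs[0].value.

# Sketch of the post-commit app.py; see the caveats above. Not the committed code.
import requests
import gradio as gr

API_URL2 = "https://api-inference.huggingface.co/models/valhalla/longformer-base-4096-finetuned-squadv1"
headers2 = {"Authorization": "Bearer hf_xxx"}  # placeholder; keep real tokens out of the repo


def query2(payload):
    # Assumed helper (not shown in the diff), mirroring the visible query() pattern.
    response = requests.post(API_URL2, headers=headers2, json=payload)
    return response.json()


class State:
    # The diff only shows "class State:"; these attributes are assumptions.
    question = ""
    context = ""


state = State()


def ask_question(question, context):
    # Gradio passes the two Textbox values in as arguments.
    state.question = question
    state.context = context
    output2 = query2({
        "inputs": {
            "question": state.question,
            "context": state.context,
        }
    })
    # The question-answering Inference API typically returns a dict such as
    # {"answer": ..., "score": ..., "start": ..., "end": ...}.
    return output2.get("answer", str(output2))


iface_ask = gr.Interface(
    fn=ask_question,
    inputs=[
        gr.Textbox(placeholder="Enter your question"),
        gr.Textbox(placeholder="Enter context"),
    ],
    outputs=gr.Textbox(label="Answer"),
    live=True,
)

iface_ask.launch()

With live=True the handler re-runs on every input change, so each keystroke triggers an Inference API request; dropping live=True falls back to Gradio's submit button and keeps the number of calls down.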