Update app.py
app.py
CHANGED
@@ -93,9 +93,9 @@ def parse_output_llm_with_sources(output):
 
 
 
-Q = SimpleQueue()
 job_done = object() # signals the processing is done
 
+
 class StreamingGradioCallbackHandler(BaseCallbackHandler):
     def __init__(self, q: SimpleQueue):
         self.q = q
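The hunk above drops the module-level `Q = SimpleQueue()` (it moves inside `answer_bot`, see below) and keeps the queue-backed streaming handler, of which only `__init__` appears as context. For orientation, here is a minimal sketch of how such a handler is typically completed in LangChain; the class name and both hook bodies are assumptions, not code from this commit:

```python
# Sketch of the queue-backed streaming pattern (hypothetical completion,
# not taken from this commit).
from queue import SimpleQueue
from langchain.callbacks.base import BaseCallbackHandler

job_done = object()  # sentinel: signals the processing is done

class QueueStreamingHandler(BaseCallbackHandler):
    """Push each streamed token onto a queue for a consumer thread to drain."""

    def __init__(self, q: SimpleQueue):
        self.q = q

    def on_llm_new_token(self, token: str, **kwargs) -> None:
        self.q.put(token)       # forward every token as it is generated

    def on_llm_end(self, response, **kwargs) -> None:
        self.q.put(job_done)    # tell the consumer no more tokens are coming
```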
@@ -129,10 +129,7 @@ class StreamingGradioCallbackHandler(BaseCallbackHandler):
 
 # Create embeddings function and LLM
 embeddings_function = HuggingFaceEmbeddings(model_name = "sentence-transformers/multi-qa-mpnet-base-dot-v1")
-
-llm_streaming = get_llm(max_tokens = 1024,temperature = 0.0,verbose = True,streaming = True,
-    callbacks=[StreamingGradioCallbackHandler(Q),StreamingStdOutCallbackHandler()],
-    )
+
 
 # Create vectorstore and retriever
 vectorstore = get_pinecone_vectorstore(embeddings_function)
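With the streaming LLM construction gone from module scope (it reappears inside `answer_bot` below), only the embeddings function is created here. `HuggingFaceEmbeddings` wraps a sentence-transformers model behind LangChain's embeddings interface; a quick usage sketch, where the query string is made up and 768 is the standard output dimension for mpnet-base models:

```python
from langchain.embeddings import HuggingFaceEmbeddings

embeddings_function = HuggingFaceEmbeddings(
    model_name="sentence-transformers/multi-qa-mpnet-base-dot-v1"
)

# Embed one query string into the vector space used by the Pinecone index.
vector = embeddings_function.embed_query("How much have sea levels risen since 1900?")
print(len(vector))  # 768 for mpnet-base models
```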
@@ -146,12 +143,23 @@ vectorstore = get_pinecone_vectorstore(embeddings_function)
 from threading import Thread
 
 
-
 def answer_user(message,history):
     return message, history + [[message, None]]
 
 def answer_bot(message,history,audience,sources):
 
+
+    Q = SimpleQueue()
+
+    llm_reformulation = get_llm(max_tokens = 512,temperature = 0.0,verbose = True,streaming = False)
+    llm_streaming = get_llm(max_tokens = 1024,temperature = 0.0,verbose = True,streaming = True,
+        callbacks=[StreamingGradioCallbackHandler(Q),StreamingStdOutCallbackHandler()],
+        )
+
+    retriever = ClimateQARetriever(vectorstore=vectorstore,sources = sources,k_summary = 3,k_total = 10)
+    chain = load_climateqa_chain(retriever,llm_reformulation,llm_streaming)
+
+
     if len(sources) == 0:
         sources = ["IPCC"]
 
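Building `Q`, both LLMs, the retriever, and the chain inside `answer_bot` gives every request its own queue and callback handler, so tokens from concurrent chats cannot interleave, and lets the retriever honour the per-request `sources` selection. One ordering wrinkle: `ClimateQARetriever` is constructed from `sources` before the `if len(sources) == 0` fallback runs, so an empty selection reaches the retriever as-is. A sketch of the same body with the fallback hoisted first, reusing the names from the diff (the reordering is a suggestion, not part of this commit):

```python
def answer_bot(message, history, audience, sources):
    # Suggested order: apply the IPCC default before the retriever is built.
    if len(sources) == 0:
        sources = ["IPCC"]

    Q = SimpleQueue()  # per-request queue: no cross-user token mixing

    llm_reformulation = get_llm(max_tokens=512, temperature=0.0,
                                verbose=True, streaming=False)
    llm_streaming = get_llm(max_tokens=1024, temperature=0.0,
                            verbose=True, streaming=True,
                            callbacks=[StreamingGradioCallbackHandler(Q),
                                       StreamingStdOutCallbackHandler()])

    retriever = ClimateQARetriever(vectorstore=vectorstore, sources=sources,
                                   k_summary=3, k_total=10)
    chain = load_climateqa_chain(retriever, llm_reformulation, llm_streaming)
    # ... rest of answer_bot unchanged ...
```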
@@ -160,9 +168,6 @@ def answer_bot(message,history,audience,sources):
     # history[-1][1] += "\n\n" + complete_response
     # return "", history, ""
 
-    retriever = ClimateQARetriever(vectorstore=vectorstore,sources = sources,k_summary = 3,k_total = 10)
-    chain = load_climateqa_chain(retriever,llm_reformulation,llm_streaming)
-
     def threaded_chain(query,audience):
         response = chain({"query":query,"audience":audience})
         Q.put(response)
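`threaded_chain` is the producer half of the pattern: it runs the chain on a worker thread and puts the final response dict on `Q`, while the streaming callback feeds tokens onto the same queue. The consumer half sits outside this hunk; a typical drain loop, assuming the `job_done` sentinel and the names above, looks like this:

```python
from threading import Thread

def stream_answer(query, audience):
    # Hypothetical consumer: start the chain in the background, then yield
    # the growing answer as tokens arrive, until the sentinel appears.
    worker = Thread(target=threaded_chain, args=(query, audience))
    worker.start()
    partial = ""
    while True:
        item = Q.get()             # blocks for a token, the final dict, or job_done
        if item is job_done:
            break
        if isinstance(item, str):  # tokens are strings; the final response dict is skipped
            partial += item
            yield partial          # each yield re-renders the chat message
    worker.join()
```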
@@ -416,8 +421,8 @@ with gr.Blocks(title="🌍 Climate Q&A", css="style.css", theme=theme) as demo:
     # bot.like(vote,None,None)
 
     with gr.Row(elem_id = "input-message"):
-        textbox=gr.Textbox(placeholder="Ask me anything here!",show_label=False,scale=
-        submit_button = gr.Button(">",scale = 1,elem_id = "submit-button")
+        textbox=gr.Textbox(placeholder="Ask me anything here!",show_label=False,scale=1,lines = 1,interactive = True)
+        # submit_button = gr.Button(">",scale = 1,elem_id = "submit-button")
 
 
     with gr.Column(scale=1, variant="panel",elem_id = "right-panel"):
@@ -489,16 +494,15 @@ with gr.Blocks(title="🌍 Climate Q&A", css="style.css", theme=theme) as demo:
 
 
     # textbox.submit(predict_climateqa,[textbox,bot],[None,bot,sources_textbox])
-
-    textbox.submit(answer_user, [textbox, bot], [textbox, bot], queue=False).then(
-        answer_bot, [textbox,bot,dropdown_audience,dropdown_sources], [textbox,bot,sources_textbox]
-    )
-    examples_hidden.change(answer_user, [examples_hidden, bot], [textbox, bot], queue=False).then(
+    textbox.submit(answer_user, [textbox, bot], [textbox, bot], queue=True).then(
         answer_bot, [textbox,bot,dropdown_audience,dropdown_sources], [textbox,bot,sources_textbox]
     )
-    submit_button.click(answer_user, [textbox, bot], [textbox, bot], queue=False).then(
+    examples_hidden.change(answer_user, [examples_hidden, bot], [textbox, bot], queue=True).then(
         answer_bot, [textbox,bot,dropdown_audience,dropdown_sources], [textbox,bot,sources_textbox]
     )
+    # submit_button.click(answer_user, [textbox, bot], [textbox, bot], queue=True).then(
+    #     answer_bot, [textbox,bot,dropdown_audience,dropdown_sources], [textbox,bot,sources_textbox]
+    # )
 
 
 
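Two changes here: the handlers now run with `queue=True`, and the dedicated submit button is retired in favour of submitting with Enter (its wiring stays behind as comments). The switch to `queue=True` likely matters because Gradio 3.x only runs generator handlers, i.e. functions that `yield` partial updates the way a streaming `answer_bot` must, through the queue. A self-contained sketch of the fast-then-slow `.submit(...).then(...)` idiom, with a made-up stand-in for `answer_bot`:

```python
import gradio as gr

def answer_user(message, history):
    # Fast step: show the user's message in the chat immediately.
    return message, history + [[message, None]]

def answer_bot_stub(message, history):
    # Hypothetical stand-in for the streaming answer_bot in app.py.
    history[-1][1] = f"(answer to: {message})"
    return "", history

with gr.Blocks() as demo:
    bot = gr.Chatbot()
    textbox = gr.Textbox(placeholder="Ask me anything here!", show_label=False)
    # The first event echoes the message; .then() chains the slow LLM step after it.
    textbox.submit(answer_user, [textbox, bot], [textbox, bot], queue=True).then(
        answer_bot_stub, [textbox, bot], [textbox, bot]
    )

demo.queue()
demo.launch()
```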
@@ -684,6 +688,6 @@ Or around 2 to 4 times more than a typical Google search.
 """
     )
 
-demo.queue(concurrency_count=
+demo.queue(concurrency_count=1)
 
 demo.launch()
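Finally, `demo.queue(concurrency_count=1)` (Gradio 3.x) enables the request queue with a single worker, so events are processed one at a time and concurrent visitors wait their turn, which keeps the per-request streaming queues strictly serialized:

```python
# As committed: one worker drains the event queue, serializing requests.
demo.queue(concurrency_count=1)

# Hypothetical tuning, not in this commit: if the LLM backend tolerates
# parallel calls, a higher count lets several chats stream at once.
# demo.queue(concurrency_count=4)

demo.launch()
```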