Update app.py
app.py CHANGED
@@ -87,7 +87,7 @@ user_id = create_user_id()
 # Create vectorstore and retriever
 vectorstore = get_pinecone_vectorstore(embeddings_function)
 llm = get_llm(provider="openai",max_tokens = 1024,temperature = 0.0)
-reranker = get_reranker("
+reranker = get_reranker("large")
 agent = make_graph_agent(llm,vectorstore,reranker)


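The only functional change in this hunk is the size label passed to `get_reranker`, which now requests the "large" reranking model before the reranker is wired into `make_graph_agent`. The diff does not show `get_reranker` itself, so the sketch below only illustrates what such a size-label dispatcher can look like; the sentence-transformers backend, the helper names, and the model mapping are assumptions, not the project's implementation.

```python
# Hypothetical sketch of a size-label dispatcher like get_reranker.
# Backend and model names are assumptions for illustration only.
from sentence_transformers import CrossEncoder

_RERANKER_MODELS = {
    "small": "cross-encoder/ms-marco-MiniLM-L-6-v2",   # assumed mapping
    "large": "cross-encoder/ms-marco-MiniLM-L-12-v2",  # assumed mapping
}

def get_reranker_sketch(size: str = "large") -> CrossEncoder:
    """Return a cross-encoder reranker for the given size label."""
    return CrossEncoder(_RERANKER_MODELS[size])

def rerank(reranker: CrossEncoder, query: str, passages: list[str], top_k: int = 5) -> list[str]:
    """Score each (query, passage) pair and keep the top_k highest-scoring passages."""
    scores = reranker.predict([(query, p) for p in passages])
    ranked = sorted(zip(passages, scores), key=lambda pair: pair[1], reverse=True)
    return [p for p, _ in ranked[:top_k]]
```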
@@ -167,6 +167,13 @@ async def chat(query,history,audience,sources,reports):
 print(f"Error getting documents: {e}")
 print(event)

+# elif event["name"] == "retrieve_documents" and event["event"] == "on_chain_start":
+# print(event)
+# questions = event["data"]["input"]["questions"]
+# questions = "\n".join([f"{i+1}. {q['question']} ({q['source']})" for i,q in enumerate(questions)])
+# answer_yet = "🔄️ Searching in the knowledge base\n{questions}"
+# history[-1] = (query,answer_yet)
+

 for event_name,(event_description,display_output) in steps_display.items():
 if event["name"] == event_name:
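The added block stays commented out; if enabled, it would update the chat history with the reformulated questions when the `retrieve_documents` node starts. The events inspected here and by the `steps_display` loop are dicts with `event`, `name` and `data` keys, as produced when the graph agent built above is streamed (for example via LangChain's `astream_events`; the actual streaming call sits outside this diff, so that is an assumption). One detail worth noting if the block is ever re-enabled: `answer_yet` lacks the `f` prefix, so `{questions}` would be rendered literally. A hedged sketch of the re-enabled handler, with the surrounding loop assumed rather than taken from app.py:

```python
# Hedged sketch: the commented-out handler re-enabled inside an event-stream loop.
# Assumes `agent` is a LangChain/LangGraph runnable exposing astream_events and
# that `history` is a list of (user, bot) tuples, as in the diff above.
async def stream_search_status(agent, query: str, history: list, inputs: dict):
    """Yield chat-history updates while the agent streams retrieval events."""
    async for event in agent.astream_events(inputs, version="v1"):
        if event["name"] == "retrieve_documents" and event["event"] == "on_chain_start":
            questions = event["data"]["input"]["questions"]
            questions = "\n".join(
                f"{i+1}. {q['question']} ({q['source']})" for i, q in enumerate(questions)
            )
            # f-string here: the commented line in the diff omits the f prefix,
            # so {questions} would be shown literally if copied back verbatim.
            answer_yet = f"🔄️ Searching in the knowledge base\n{questions}"
            history[-1] = (query, answer_yet)
            yield history
```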
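For completeness, the loop at the end of the hunk unpacks `steps_display`, a mapping from graph event names to `(event_description, display_output)` pairs. Its contents are not part of this diff, so the entries in the sketch below are illustrative assumptions showing only the expected shape.

```python
# Hedged sketch of the steps_display mapping iterated in the second hunk:
# graph node names mapped to (event_description, display_output) pairs.
# The concrete entries are assumptions, not taken from app.py.
steps_display = {
    "categorize_intent": ("🔄️ Analyzing user message", True),
    "retrieve_documents": ("🔄️ Searching in the knowledge base", False),
}

# Mirrors the loop in the diff: match the current event's name against the
# mapping to decide which status message to surface in the chat history.
for event_name, (event_description, display_output) in steps_display.items():
    print(f"{event_name}: {event_description} (display={display_output})")
```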
|