update output json
main.py
CHANGED
@@ -530,6 +530,48 @@ async def followup_agent(query: FollowupQueryModel, background_tasks: Background
 
     return StreamingResponse(process_response(), media_type="text/event-stream")
 
+@app.post("/v2/followup-agent")
+async def followup_agent(query: FollowupQueryModel, background_tasks: BackgroundTasks, api_key: str = Depends(verify_api_key)):
+    """
+    Followup agent endpoint that provides helpful responses or generates clarifying questions based on user queries.
+    Requires API Key authentication via X-API-Key header.
+    """
+    logger.info(f"Received followup agent query: {query.query}")
+
+    if query.conversation_id not in conversations:
+        conversations[query.conversation_id] = [
+            {"role": "system", "content": FOLLOWUP_AGENT_PROMPT}
+        ]
+
+    conversations[query.conversation_id].append({"role": "user", "content": query.query})
+    last_activity[query.conversation_id] = time.time()
+
+    # Limit tokens in the conversation history
+    limited_conversation = conversations[query.conversation_id]
+
+    def process_response():
+        full_response = ""
+        for content in chat_with_llama_stream(limited_conversation, model=query.model_id):
+            full_response += content
+            yield content
+
+        logger.info(f"LLM RAW response for query: {query.query}: {full_response}")
+        response_content, interact = parse_followup_response(full_response)
+
+        result = {
+            "clarification": interact
+        }
+
+        yield "<json>" + json.dumps(result)
+
+        # Add the assistant's response to the conversation history
+        conversations[query.conversation_id].append({"role": "assistant", "content": full_response})
+
+        background_tasks.add_task(update_db, query.user_id, query.conversation_id, query.query, full_response)
+        logger.info(f"Completed followup agent response for query: {query.query}, send result: {result}")
+
+    return StreamingResponse(process_response(), media_type="text/event-stream")
+
 if __name__ == "__main__":
     import uvicorn
     logger.info("Starting the application")
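
For reference, a client can exercise the new /v2/followup-agent route by POSTing the payload fields the endpoint reads (query, conversation_id, user_id, model_id) with the X-API-Key header, then reading the streamed text; the tail of the stream carries a "<json>" marker followed by the clarification object. A minimal sketch, assuming the app is served at http://localhost:8000 and using placeholder values for the API key, model id, and payload:

# Minimal client sketch (assumptions: app served at http://localhost:8000;
# API key, model id, and payload values below are placeholders).
import json
import requests

payload = {
    "query": "How do I export my data?",
    "conversation_id": "demo-conversation-1",
    "user_id": "demo-user",
    "model_id": "placeholder-model-id",
}

with requests.post(
    "http://localhost:8000/v2/followup-agent",
    json=payload,
    headers={"X-API-Key": "YOUR_API_KEY"},
    stream=True,
) as resp:
    resp.raise_for_status()
    # Collect the streamed chunks; the endpoint streams the LLM text first,
    # then appends "<json>" plus a JSON object holding the "clarification" field.
    body = b"".join(resp.iter_content(chunk_size=None)).decode("utf-8")

answer_text, _, json_trailer = body.partition("<json>")
print(answer_text)
if json_trailer:
    print(json.loads(json_trailer))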