vikramvasudevan committed on
Commit
91c2066
·
verified ·
1 Parent(s): 6a252fb

Upload folder using huggingface_hub

Browse files
Files changed (1) hide show
  1. app.py +30 -2
app.py CHANGED
@@ -16,7 +16,7 @@ graph = generate_graph()
16
 
17
  def init():
18
  load_dotenv(override=True)
19
-
20
  if(os.path.isdir("./chromadb-store")):
21
  logger.warning("database exists locally. not downloading.")
22
  return
@@ -39,6 +39,17 @@ def init_session():
39
  return str(uuid.uuid4())
40
 
41
 
 
 
 
 
 
 
 
 
 
 
 
42
  def chat(message, history, thread_id):
43
  print("received chat message for thread:", thread_id)
44
  config = {"configurable": {"thread_id": thread_id}}
@@ -47,6 +58,15 @@ def chat(message, history, thread_id):
47
  )
48
  return response["messages"][-1].content
49
 
 
 
 
 
 
 
 
 
 
50
 
51
  thread_id = gr.State(init_session)
52
  supported_scriptures = "\n - ".join(
@@ -87,9 +107,17 @@ chatbot = gr.Chatbot(
87
  height=700,
88
  render_markdown=True,
89
  )
 
 
 
90
  chatInterface = gr.ChatInterface(
91
- title="Sanatan-AI", fn=chat, additional_inputs=[thread_id], chatbot=chatbot
 
 
 
92
  )
 
 
93
  # initialize database
94
  init()
95
  chatInterface.launch()
 
16
 
17
  def init():
18
  load_dotenv(override=True)
19
+
20
  if(os.path.isdir("./chromadb-store")):
21
  logger.warning("database exists locally. not downloading.")
22
  return
 
39
  return str(uuid.uuid4())
40
 
41
 
42
async def chat_wrapper(message, history, thread_id, debug):
    """Route a chat turn to streaming or one-shot mode.

    When *debug* is true, intermediate graph outputs are yielded one at a
    time via ``chat_streaming``; otherwise the complete answer from
    ``chat`` is yielded once.  Always an async generator so Gradio can
    consume either mode uniformly.
    """
    if not debug:
        # One-shot path: a single yield with the final answer.
        yield chat(message, history, thread_id)
        return
    # Debug path: surface each chunk as the graph produces it.
    async for part in chat_streaming(message, history, thread_id):
        yield part
52
+
53
  def chat(message, history, thread_id):
54
  print("received chat message for thread:", thread_id)
55
  config = {"configurable": {"thread_id": thread_id}}
 
58
  )
59
  return response["messages"][-1].content
60
 
61
async def chat_streaming(message, history, thread_id):
    """Stream assistant output for one chat turn.

    Builds the graph input from the prior *history* plus the new user
    *message*, runs the graph asynchronously under the given *thread_id*,
    and yields the latest message content after each graph step.
    """
    turn_messages = list(history or [])
    turn_messages.append({"role": "user", "content": message})
    run_config = {"configurable": {"thread_id": thread_id}}
    async for step in graph.astream({"messages": turn_messages}, config=run_config):
        # Each step maps node name -> updated state,
        # e.g. {"respond": ChatState, "post_process": ChatState}.
        for node_state in step.values():
            yield node_state["messages"][-1].content
69
+ print("received chat message for thread:", thread_id)
70
 
71
  thread_id = gr.State(init_session)
72
  supported_scriptures = "\n - ".join(
 
107
  height=700,
108
  render_markdown=True,
109
  )
110
+
111
+ debug_checkbox = gr.Checkbox(label="Debug (Streaming)", value=False)
112
+
113
  chatInterface = gr.ChatInterface(
114
+ title="Sanatan-AI",
115
+ fn=chat_wrapper,
116
+ additional_inputs=[thread_id, debug_checkbox],
117
+ chatbot=chatbot,
118
  )
119
+
120
+
121
  # initialize database
122
  init()
123
  chatInterface.launch()