WebashalarForML committed
Commit 947aef7 · verified · 1 Parent(s): 620904e

Update app.py

Files changed (1):
  1. app.py +2 -1
app.py CHANGED
@@ -179,8 +179,9 @@ def chat():
         logger.info("Invoking LLM with full history and prepared prompt...")
         llm_response = llm.invoke(llm_messages)
         raw_response = llm_response.content if hasattr(llm_response, "content") else str(llm_response)
+
 
-        logger.info(f"Raw LLM response: {raw_response[:200]}...")
+        logger.info(f"Raw LLM response: {raw_response}")
         parsed_result = extract_json_from_llm_response(raw_response)
 
     except Exception as e:
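
This commit swaps the truncated log line (only the first 200 characters of the raw response, followed by "...") for one that records the complete raw LLM output before it is handed to extract_json_from_llm_response, so malformed or fence-wrapped JSON can be diagnosed from the logs. The helper itself is not part of this diff; the sketch below is only an assumption of how such a function is commonly written (strip markdown code fences, then json.loads), not the repository's actual implementation.

import json
import logging
import re

logger = logging.getLogger(__name__)

def extract_json_from_llm_response(raw_response: str):
    """Hypothetical sketch, not the code from app.py: pull a JSON object out of
    an LLM reply that may be wrapped in ```json fences or surrounding prose."""
    # Prefer the contents of a fenced ```json ... ``` block if one is present.
    fenced = re.search(r"```(?:json)?\s*(\{.*?\})\s*```", raw_response, re.DOTALL)
    if fenced:
        candidate = fenced.group(1)
    else:
        # Otherwise fall back to the first {...} span in the text.
        brace = re.search(r"\{.*\}", raw_response, re.DOTALL)
        candidate = brace.group(0) if brace else raw_response
    try:
        return json.loads(candidate)
    except json.JSONDecodeError:
        # Logging the full raw response (the new line 184) is what makes this
        # failure mode diagnosable after the fact.
        logger.error("Could not parse JSON from LLM response")
        return None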