WebashalarForML committed · verified
Commit: 34bf502 · Parent(s): 441bbe1

Update app.py

Files changed (1)
  1. app.py +23 -70
app.py CHANGED
@@ -122,85 +122,38 @@ def serve_frontend():
     except Exception:
         return "<h3>frontend.html not found in static/ — please add your frontend.html there.</h3>", 404
 
-@app.route("/chat", methods=["POST"])
 def chat():
     data = request.get_json(force=True)
-    if not isinstance(data, dict):
-        return jsonify({"error": "invalid request body"}), 400
-
-    chat_history: List[Dict[str, str]] = data.get("chat_history") or []
-    assistant_state: AssistantState = data.get("assistant_state") or {}
-
-    state: AssistantState = {
-        "conversationSummary": assistant_state.get("conversationSummary", ""),
-        "language": assistant_state.get("language", "Python"),
-        "taggedReplies": assistant_state.get("taggedReplies", []),
-    }
-
-    llm_messages = [{"role": "system", "content": PROGRAMMING_ASSISTANT_PROMPT}]
-
-    last_user_message = ""
-    for msg in chat_history:
-        role = msg.get("role")
-        content = msg.get("content")
-        if role in ["user", "assistant"] and content:
-            llm_messages.append({"role": role, "content": content})
-            if role == "user":
-                last_user_message = content
-
-    detected_lang = detect_language_from_text(last_user_message)
-    if detected_lang and detected_lang.lower() != state["language"].lower():
-        logger.info("Detected new language: %s", detected_lang)
-        state["language"] = detected_lang
-
-    context_hint = f"Current Language: {state['language']}. Conversation Summary so far: {state['conversationSummary']}"
-    if llm_messages and llm_messages[-1]["role"] == "user":
-        llm_messages[-1]["content"] = f"USER MESSAGE: {last_user_message}\n\n[CONTEXT HINT: {context_hint}]"
-    elif last_user_message:
-        llm_messages.append({"role": "user", "content": f"USER MESSAGE: {last_user_message}\n\n[CONTEXT HINT: {context_hint}]"})
-
-    try:
-        logger.info("Invoking LLM with full history and prepared prompt...")
-        llm_response = llm.invoke(llm_messages)
-        raw_response = llm_response.content if hasattr(llm_response, "content") else str(llm_response)
-        logger.info(f"Raw LLM response: {raw_response}")
-        parsed_result = extract_json_from_llm_response(raw_response)
-    except Exception as e:
-        logger.exception("LLM invocation failed")
-        error_detail = str(e)
-        if 'decommissioned' in error_detail:
-            error_detail = "LLM Model Error: The model is likely decommissioned. Please check the 'LLM_MODEL' environment variable or the default model in app.py."
-        return jsonify({"error": "LLM invocation failed", "detail": error_detail}), 500
-
-    if parsed_result.get("assistant_reply") == LLM_PARSE_ERROR_MESSAGE:
-        return jsonify({
-            "assistant_reply": LLM_PARSE_ERROR_MESSAGE,
-            "updated_state": state,
-            "suggested_tags": [],
-        })
-
-    updated_state_from_llm = parsed_result.get("state_updates", {})
-    if 'conversationSummary' in updated_state_from_llm:
-        state["conversationSummary"] = updated_state_from_llm["conversationSummary"]
-    if 'language' in updated_state_from_llm and updated_state_from_llm['language'].strip():
-        state["language"] = updated_state_from_llm["language"]
-
-    assistant_reply = parsed_result.get("assistant_reply")
-    code_snippet = parsed_result.get("code_snippet")
-
-    final_reply_content = assistant_reply
-    if code_snippet and code_snippet.strip():
-        if final_reply_content.strip():
-            final_reply_content += "\n\n"
-        final_reply_content += code_snippet
-
-    if not final_reply_content.strip():
-        final_reply_content = "I'm here to help with your code! What programming language are you using?"
-
+    chat_history = data.get("chat_history", [])
+    assistant_state = data.get("assistant_state", {})
+
+    conversation_summary = assistant_state.get("conversationSummary", "")
+    language = assistant_state.get("language", "Python")
+
+    # Build prompt with system + conversation summary + chat history
+    system_prompt = f"You are a helpful programming assistant. Current language: {language}. Conversation summary: {conversation_summary}"
+    messages = [{"role": "system", "content": system_prompt}]
+    messages.extend(chat_history)
+
+    # Call LLM, get plain text response
+    llm_response = llm.invoke(messages)
+    assistant_reply = llm_response.content if hasattr(llm_response, "content") else str(llm_response)
+
+    # Append assistant reply to chat history
+    chat_history.append({"role": "assistant", "content": assistant_reply})
+
+    # Optionally update conversation summary (e.g., call summarization chain)
+    conversation_summary = update_summary(chat_history)
+
+    # Return plain text reply and updated state
     return jsonify({
-        "assistant_reply": final_reply_content,
-        "updated_state": state,
-        "suggested_tags": parsed_result.get("suggested_tags", []),
+        "assistant_reply": assistant_reply,
+        "updated_state": {
+            "conversationSummary": conversation_summary,
+            "language": language,
+            "taggedReplies": assistant_state.get("taggedReplies", []),
+        },
+        "chat_history": chat_history,
     })
 
 @app.route("/tag_reply", methods=["POST"])
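
The rewritten handler calls update_summary(chat_history), which is not shown in this diff. A minimal sketch of what such a helper might look like, assuming the same module-level llm object used above; the prompt wording and the recent-message window are illustrative choices, not part of the commit:

def update_summary(chat_history):
    """Hypothetical helper: condense the chat history into a short running summary.

    Assumes the module-level `llm` used elsewhere in app.py; the prompt text
    and the 20-message window below are illustrative, not from this commit.
    """
    recent = chat_history[-20:]  # cap the transcript so the summarization prompt stays small
    transcript = "\n".join(f"{m.get('role', '')}: {m.get('content', '')}" for m in recent)
    messages = [
        {"role": "system", "content": "Summarize the conversation below in two or three sentences."},
        {"role": "user", "content": transcript},
    ]
    response = llm.invoke(messages)
    return response.content if hasattr(response, "content") else str(response)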
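For reference, the simplified endpoint expects chat_history and assistant_state in the request body and returns the plain-text reply, the updated state, and the extended history. A round-trip might look roughly like the following, assuming the function is still registered at the /chat URL and the app listens on localhost:7860 (both assumptions; the payload and response fields mirror the handler above):

import requests

# Hypothetical client call; URL, port, and the example message are assumptions.
payload = {
    "chat_history": [
        {"role": "user", "content": "How do I reverse a list in Python?"},
    ],
    "assistant_state": {
        "conversationSummary": "",
        "language": "Python",
        "taggedReplies": [],
    },
}

resp = requests.post("http://localhost:7860/chat", json=payload)
data = resp.json()
print(data["assistant_reply"])      # plain-text reply from the LLM
print(data["updated_state"])        # conversationSummary / language / taggedReplies
print(len(data["chat_history"]))    # history now ends with the assistant turn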