rdune71 committed on
Commit
857c4c0
·
1 Parent(s): f6d2b4a

Add debug panel to sidebar and implement error handling for LLM responses

Browse files
Files changed (1) hide show
  1. app.py +35 -10
app.py CHANGED
@@ -8,12 +8,15 @@ sys.path.append(str(Path(__file__).parent))
8
  from utils.config import config
9
  from core.llm import send_to_ollama, send_to_hf
10
  from core.session import session_manager
 
11
 
12
  st.set_page_config(page_title="AI Life Coach", page_icon="🧠", layout="wide")
13
 
14
  # Initialize session state
15
  if "messages" not in st.session_state:
16
  st.session_state.messages = []
 
 
17
 
18
  # Sidebar
19
  with st.sidebar:
@@ -47,6 +50,17 @@ with st.sidebar:
47
  if st.button("Clear History"):
48
  st.session_state.messages = []
49
  st.success("History cleared!")
 
 
 
 
 
 
 
 
 
 
 
50
 
51
  # Main chat interface
52
  st.title("🧠 AI Life Coach")
@@ -78,6 +92,9 @@ if send_button and user_input.strip():
78
  # Add user message to history
79
  st.session_state.messages.append({"role": "user", "content": user_input})
80
 
 
 
 
81
  # Get conversation history
82
  user_session = session_manager.get_session("default_user")
83
  conversation = user_session.get("conversation", [])
@@ -89,26 +106,33 @@ if send_button and user_input.strip():
89
  with st.spinner("AI Coach is thinking..."):
90
  ai_response = None
91
  backend_used = ""
 
92
 
93
  # Try Ollama first if not falling back
94
  if not config.use_fallback:
95
- ai_response = send_to_ollama(
96
- user_input,
97
- conversation_history,
98
- st.session_state.ngrok_url,
99
- st.session_state.selected_model
100
- )
101
- backend_used = "Ollama"
 
 
 
102
 
103
  # Fallback to Hugging Face
104
  if not ai_response and config.hf_token:
105
- ai_response = send_to_hf(user_input, conversation_history)
106
- backend_used = "Hugging Face"
 
 
 
107
 
108
  if ai_response:
109
  st.markdown(f"{ai_response}")
110
 
111
- # Update conversation history (stub – actual save will come later)
112
  conversation.append({"role": "user", "content": user_input})
113
  conversation.append({"role": "assistant", "content": ai_response})
114
 
@@ -120,6 +144,7 @@ if send_button and user_input.strip():
120
  st.session_state.messages.append({"role": "assistant", "content": ai_response})
121
  else:
122
  st.error("Failed to get response from both providers.")
 
123
 
124
  # Clear input by forcing rerun
125
  st.experimental_rerun()
 
8
  from utils.config import config
9
  from core.llm import send_to_ollama, send_to_hf
10
  from core.session import session_manager
11
+ from core.memory import check_redis_health
12
 
13
  st.set_page_config(page_title="AI Life Coach", page_icon="🧠", layout="wide")
14
 
15
  # Initialize session state
16
  if "messages" not in st.session_state:
17
  st.session_state.messages = []
18
+ if "last_error" not in st.session_state:
19
+ st.session_state.last_error = ""
20
 
21
  # Sidebar
22
  with st.sidebar:
 
50
  if st.button("Clear History"):
51
  st.session_state.messages = []
52
  st.success("History cleared!")
53
+
54
+ # Debug info
55
+ with st.sidebar.expander("🔧 Debug Info"):
56
+ st.write(f"**OLLAMA_HOST**: `{st.session_state.ngrok_url}`")
57
+ st.write(f"**Selected Model**: `{st.session_state.selected_model}`")
58
+ st.write(f"**Fallback Mode**: {'✅ On' if config.use_fallback else '❌ Off'}")
59
+ st.write(f"**Redis Status**: {'✅ Healthy' if check_redis_health() else '⚠️ Unavailable'}")
60
+ st.write(f"**Env Detected As**: {'☁️ HF Space' if config.is_hf_space else '🏠 Local'}")
61
+ st.write(f"**HF Token Set**: {'✅ Yes' if config.hf_token else '❌ No'}")
62
+ if st.session_state.last_error:
63
+ st.warning(f"Last Error: {st.session_state.last_error}")
64
 
65
  # Main chat interface
66
  st.title("🧠 AI Life Coach")
 
92
  # Add user message to history
93
  st.session_state.messages.append({"role": "user", "content": user_input})
94
 
95
+ # Reset error state
96
+ st.session_state.last_error = ""
97
+
98
  # Get conversation history
99
  user_session = session_manager.get_session("default_user")
100
  conversation = user_session.get("conversation", [])
 
106
  with st.spinner("AI Coach is thinking..."):
107
  ai_response = None
108
  backend_used = ""
109
+ error_msg = ""
110
 
111
  # Try Ollama first if not falling back
112
  if not config.use_fallback:
113
+ try:
114
+ ai_response = send_to_ollama(
115
+ user_input,
116
+ conversation_history,
117
+ st.session_state.ngrok_url,
118
+ st.session_state.selected_model
119
+ )
120
+ backend_used = "Ollama"
121
+ except Exception as e:
122
+ error_msg = f"Ollama error: {str(e)}"
123
 
124
  # Fallback to Hugging Face
125
  if not ai_response and config.hf_token:
126
+ try:
127
+ ai_response = send_to_hf(user_input, conversation_history)
128
+ backend_used = "Hugging Face"
129
+ except Exception as e:
130
+ error_msg = f"Hugging Face error: {str(e)}"
131
 
132
  if ai_response:
133
  st.markdown(f"{ai_response}")
134
 
135
+ # Update conversation history
136
  conversation.append({"role": "user", "content": user_input})
137
  conversation.append({"role": "assistant", "content": ai_response})
138
 
 
144
  st.session_state.messages.append({"role": "assistant", "content": ai_response})
145
  else:
146
  st.error("Failed to get response from both providers.")
147
+ st.session_state.last_error = error_msg or "No response from either provider"
148
 
149
  # Clear input by forcing rerun
150
  st.experimental_rerun()