rdune71 committed
Commit 482aace · 1 Parent(s): 2a0dea8

Implement enhanced debug panel with comprehensive system monitoring and controls

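The panel follows one Streamlit pattern throughout the diff below: a single sidebar expander, subsections separated by st.subheader and st.divider, and each provider probe wrapped in try/except so a failing service only degrades its own status line. A minimal sketch of that pattern, with check_provider as an illustrative stand-in for the real monitor calls (it is not part of this commit):

    import streamlit as st

    def check_provider() -> dict:
        """Illustrative stand-in for a real monitor call such as check_ollama_status()."""
        return {"running": True, "model_loaded": "llama3"}

    with st.sidebar.expander("🔍 Advanced System Monitor", expanded=False):
        st.subheader("📊 Provider Status")
        try:
            status = check_provider()
            if status.get("running"):
                st.success("Provider: Running")
                st.caption(f"Model: {status.get('model_loaded', 'Unknown')}")
            else:
                st.error("Provider: Unavailable")
        except Exception:
            st.warning("Provider: status check failed")

        st.divider()
        st.subheader("⚙️ Configuration")
        st.caption("**Active Features**: Fallback, Web Search")

Run it with `streamlit run`; st.divider requires a reasonably recent Streamlit release.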
app.py CHANGED
@@ -68,72 +68,83 @@ with st.sidebar:
         st.success("History cleared!")

     # Enhanced Debug Panel
-    with st.expander("🔍 Advanced Debug", expanded=False):
-        st.subheader("System Controls")
+    with st.sidebar.expander("🔍 Advanced System Monitor", expanded=False):
+        st.subheader("🎛️ System Controls")

         # Fallback Mode Toggle
         fallback_mode = st.checkbox(
-            "Enable Fallback Mode",
+            "Enable Provider Fallback",
             value=config.use_fallback,
-            help="Enable automatic fallback between providers"
+            help="Enable automatic fallback between AI providers"
         )

-        # HF Endpoint Control
-        hf_enabled = st.checkbox(
+        # HF Deep Analysis Toggle
+        hf_analysis = st.checkbox(
             "Enable HF Deep Analysis",
             value=bool(config.hf_token),
-            help="Enable Hugging Face endpoint coordination"
+            help="Enable Hugging Face endpoint for deep analysis"
         )

         # Web Search Toggle
-        web_search_enabled = st.checkbox(
-            "Enable Web Search",
+        web_search = st.checkbox(
+            "Enable Web Research",
             value=bool(os.getenv("TAVILY_API_KEY")),
             help="Enable Tavily/DDG web search integration"
         )

-        st.subheader("Provider Status")
+        st.divider()

-        # Ollama Status
+        st.subheader("📊 Provider Status")
+
+        # Ollama Status with Detailed Info
         try:
             from services.ollama_monitor import check_ollama_status
             ollama_status = check_ollama_status()
             if ollama_status.get("running"):
-                st.success(f"🦙 Ollama: Running ({ollama_status.get('model_loaded', 'Unknown')})")
+                st.success(f"🦙 Ollama: Running")
+                if ollama_status.get("model_loaded"):
+                    st.caption(f"Model: {ollama_status['model_loaded']}")
+                st.caption(f"URL: {ollama_status.get('ngrok_url', 'N/A')}")
             else:
                 st.error("🦙 Ollama: Unavailable")
+                if st.button("🔄 Refresh Ollama Status", key="refresh_ollama"):
+                    st.experimental_rerun()
         except Exception as e:
             st.warning(f"🦙 Ollama: Status check failed")

-        # Enhanced HF Status Display
-        st.subheader("HF Endpoint Status")
+        # HF Endpoint Status with Scale-to-Zero Handling
         try:
             from services.hf_endpoint_monitor import hf_monitor
+            hf_status_detail = hf_monitor.check_endpoint_status()

-            # Show current status
-            status_summary = hf_monitor.get_status_summary()
-            if "🟢" in status_summary:
-                st.success(status_summary)
-            elif "🟡" in status_summary:
-                st.warning(status_summary)
-                if st.button("⚡ Wake Up HF Endpoint"):
-                    with st.spinner("Waking up HF endpoint... (this may take 2-4 minutes)"):
-                        success = hf_monitor.handle_scale_to_zero()
-                        if success:
-                            st.success("✅ HF endpoint is now awake!")
-                        else:
-                            st.error("❌ Failed to wake up HF endpoint. Please try again.")
+            if hf_status_detail['available']:
+                if hf_status_detail.get('initialized', False):
+                    st.success("🤗 HF Endpoint: Available & Initialized")
+                else:
+                    st.warning("🤗 HF Endpoint: Available (Initializing)")
             else:
-                st.error(status_summary)
-                if st.button("🚀 Initialize HF Endpoint"):
-                    with st.spinner("Initializing HF endpoint... (this may take 2-4 minutes)"):
+                st.error("🤗 HF Endpoint: Scaled to Zero")
+
+            # Show detailed status
+            st.caption(f"Status Code: {hf_status_detail.get('status_code', 'N/A')}")
+            if 'response_time' in hf_status_detail:
+                st.caption(f"Response Time: {hf_status_detail['response_time']:.2f}s")
+
+            # Wake-up button for scaled-to-zero endpoints
+            if not hf_status_detail['available'] and config.hf_token:
+                if st.button("⚡ Wake Up HF Endpoint", key="wake_hf"):
+                    with st.spinner("Waking up HF endpoint... (2-4 minutes)"):
                         success = hf_monitor.handle_scale_to_zero()
                         if success:
-                            st.success("✅ HF endpoint initialized successfully!")
+                            st.success("✅ HF endpoint activated!")
+                            time.sleep(2)
+                            st.experimental_rerun()
                         else:
-                            st.error("❌ Failed to initialize HF endpoint.")
+                            st.error("❌ Failed to activate HF endpoint")
+
         except Exception as e:
-            st.error(f"❌ HF Monitor Error: {str(e)}")
+            st.warning(f"🤗 HF Endpoint: Monitor unavailable")
+            st.caption(f"Error: {str(e)[:50]}...")

         # Redis Status
         redis_healthy = check_redis_health()
@@ -142,76 +153,82 @@ with st.sidebar:
         else:
             st.error("💾 Redis: Disconnected")

-        st.subheader("External Services")
+        st.divider()
+
+        st.subheader("🌐 External Services")

         # Web Search Status
-        if os.getenv("TAVILY_API_KEY"):
+        tavily_key = os.getenv("TAVILY_API_KEY")
+        if tavily_key:
             st.success("🔍 Web Search: Tavily API Active")
+            # Test search button
+            if st.button("🧪 Test Web Search", key="test_web_search"):
+                try:
+                    from tavily import TavilyClient
+                    tavily = TavilyClient(api_key=tavily_key)
+                    with st.spinner("Testing web search..."):
+                        test_result = tavily.search("AI life coach benefits", max_results=1)
+                    st.success("✅ Web search working!")
+                except Exception as e:
+                    st.error(f"❌ Web search test failed: {str(e)[:30]}...")
         else:
             st.info("🔍 Web Search: Not configured")

         # Weather Service
         if config.openweather_api_key:
             st.success("🌤️ Weather: API Active")
+            if st.button("🌡️ Test Weather", key="test_weather"):
+                try:
+                    from services.weather import weather_service
+                    with st.spinner("Testing weather service..."):
+                        test_weather = weather_service.get_current_weather("New York")
+                        if test_weather:
+                            st.success(f"✅ Weather working! {test_weather['temperature']}°C in New York")
+                        else:
+                            st.warning("⚠️ Weather service returned no data")
+                except Exception as e:
+                    st.error(f"❌ Weather test failed: {str(e)[:30]}...")
         else:
             st.info("🌤️ Weather: Not configured")

+        st.divider()
+
+        st.subheader("📈 Session Statistics")
+
         # Session Stats
         try:
             user_session = session_manager.get_session("default_user")
-            conversation_length = len(user_session.get("conversation", []))
-            st.info(f"💬 Conversation Length: {conversation_length} messages")
-        except:
-            st.info("💬 Session: Not initialized")
-
-        # Real-time Web Search Status
-        st.subheader("Web Search Activity")
-
-        # Recent searches (if tracking enabled)
-        if 'recent_searches' in st.session_state:
-            for search in st.session_state.recent_searches[-3:]:  # Last 3 searches
-                st.caption(f"🔍 {search['query'][:30]}... ({search['timestamp']})")
-        else:
-            st.info("No recent searches")
-
-        # Search test button
-        if st.button("🧪 Test Web Search"):
-            try:
-                from tavily import TavilyClient
-                if os.getenv("TAVILY_API_KEY"):
-                    tavily = TavilyClient(api_key=os.getenv("TAVILY_API_KEY"))
-                    test_result = tavily.search("AI life coach benefits", max_results=1)
-                    st.success("✅ Web search working")
-                    if test_result.get('results'):
-                        st.caption(f"Sample: {test_result['results'][0].get('title', 'No title')}")
-                else:
-                    st.warning("Web API key not configured")
-            except Exception as e:
-                st.error(f"❌ Web search test failed: {e}")
+            conversation = user_session.get("conversation", [])
+            st.caption(f"💬 Messages: {len(conversation)}")
+
+            # AI Coordination Stats (if available)
+            coord_stats = user_session.get('ai_coordination', {})
+            if coord_stats:
+                st.caption(f"🤖 AI Requests: {coord_stats.get('requests_processed', 0)}")
+                st.caption(f"🦙 Ollama Responses: {coord_stats.get('ollama_responses', 0)}")
+                st.caption(f"🤗 HF Responses: {coord_stats.get('hf_responses', 0)}")
+            else:
+                st.caption("🤖 AI Coordination: Not active")

-        # Enhanced Configuration Display
-        st.subheader("Configuration Details")
+        except Exception as e:
+            st.caption("💬 Session: Not initialized")

-        # Provider Configuration
-        st.caption(f"**Primary Provider**: Ollama ({config.local_model_name})")
-        if config.hf_token:
-            st.caption(f"**Secondary Provider**: Hugging Face")
-            st.caption(f"**HF Endpoint**: {config.hf_api_url}")
+        st.divider()

-        # Environment Detection
-        env_type = "☁️ HF Space" if config.is_hf_space else "🏠 Local"
-        st.caption(f"**Environment**: {env_type}")
+        st.subheader("⚙️ Configuration")
+        st.caption(f"**Environment**: {'☁️ HF Space' if config.is_hf_space else '🏠 Local'}")
+        st.caption(f"**Primary Model**: {config.local_model_name or 'Not set'}")

-        # Feature Flags
+        # Feature Flags Summary
         features = []
-        if config.use_fallback:
-            features.append("Fallback Mode")
-        if os.getenv("TAVILY_API_KEY"):
+        if fallback_mode:
+            features.append("Fallback")
+        if hf_analysis and config.hf_token:
+            features.append("HF Deep Analysis")
+        if web_search and tavily_key:
             features.append("Web Search")
         if config.openweather_api_key:
-            features.append("Weather Data")
-        if config.hf_token:
-            features.append("Deep Analysis")
+            features.append("Weather")

         if features:
             st.caption(f"**Active Features**: {', '.join(features)}")
services/__pycache__/hf_endpoint_monitor.cpython-313.pyc CHANGED
Binary files a/services/__pycache__/hf_endpoint_monitor.cpython-313.pyc and b/services/__pycache__/hf_endpoint_monitor.cpython-313.pyc differ
 
services/hf_endpoint_monitor.py CHANGED
@@ -19,6 +19,10 @@ class HFEndpointMonitor:
         self.check_interval = 60  # Check every minute
         self.warmup_attempts = 0
         self.max_warmup_attempts = 3
+        self.warmup_count = 0
+        self.successful_requests = 0
+        self.failed_requests = 0
+        self.avg_response_time = 0

         logger.info(f"Initialized HF Monitor with URL: {self.endpoint_url}")

@@ -140,6 +144,7 @@ class HFEndpointMonitor:
             success = response.status_code in [200, 201]
             if success:
                 self.is_initialized = True
+                self.warmup_count += 1
                 self.warmup_attempts = 0  # Reset on success
                 logger.info("✅ HF endpoint warmed up successfully")
             else:
@@ -150,6 +155,7 @@ class HFEndpointMonitor:

         except Exception as e:
             logger.error(f"HF endpoint warm-up failed: {e}")
+            self.failed_requests += 1
             return False

     def get_status_summary(self) -> str:
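The new attributes are plain instance counters: only warmup_count and failed_requests are incremented in the hunks above, while successful_requests and avg_response_time are declared but not yet updated here. A hedged usage sketch, assuming the shared hf_monitor instance that app.py imports:

    from services.hf_endpoint_monitor import hf_monitor

    # Trigger a wake-up/warm-up cycle, then read the counters added in this commit.
    if hf_monitor.handle_scale_to_zero():
        print(f"Warm-ups completed: {hf_monitor.warmup_count}")
    print(f"Failed warm-up requests: {hf_monitor.failed_requests}")
    print(hf_monitor.get_status_summary())  # pre-existing summary string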
test_enhanced_debug.py ADDED
@@ -0,0 +1,93 @@
+import sys
+from pathlib import Path
+
+# Add project root to path
+project_root = Path(__file__).parent
+sys.path.append(str(project_root))
+
+from utils.config import config
+from core.session import session_manager
+import os
+
+def test_enhanced_debug_features():
+    """Test the enhanced debug panel features"""
+    print("=== Enhanced Debug Panel Feature Test ===")
+    print()
+
+    # Test System Controls Configuration
+    print("1. Testing System Controls Configuration:")
+    print(f"   Fallback Mode: {config.use_fallback}")
+    print(f"   HF Token Available: {bool(config.hf_token)}")
+    print(f"   Tavily API Key: {bool(os.getenv('TAVILY_API_KEY'))}")
+    print(f"   OpenWeather API Key: {bool(config.openweather_api_key)}")
+    print()
+
+    # Test Provider Status
+    print("2. Testing Provider Status:")
+    try:
+        from services.ollama_monitor import check_ollama_status
+        ollama_status = check_ollama_status()
+        print(f"   Ollama Running: {ollama_status.get('running', 'Unknown')}")
+        print(f"   Ollama Model: {ollama_status.get('model_loaded', 'Unknown')}")
+    except Exception as e:
+        print(f"   Ollama Status Check Failed: {e}")
+
+    try:
+        from services.hf_endpoint_monitor import hf_monitor
+        hf_status = hf_monitor.check_endpoint_status()
+        print(f"   HF Available: {hf_status.get('available', 'Unknown')}")
+        print(f"   HF Initialized: {hf_status.get('initialized', 'Unknown')}")
+        print(f"   HF Status Code: {hf_status.get('status_code', 'Unknown')}")
+    except Exception as e:
+        print(f"   HF Status Check Failed: {e}")
+
+    print()
+
+    # Test External Services
+    print("3. Testing External Services:")
+    print(f"   Tavily API: {'✅ Configured' if os.getenv('TAVILY_API_KEY') else '❌ Not configured'}")
+    print(f"   Weather API: {'✅ Configured' if config.openweather_api_key else '❌ Not configured'}")
+    print()
+
+    # Test Session Statistics
+    print("4. Testing Session Statistics:")
+    try:
+        user_session = session_manager.get_session("default_user")
+        conversation = user_session.get("conversation", [])
+        print(f"   Conversation Length: {len(conversation)} messages")
+
+        coord_stats = user_session.get('ai_coordination', {})
+        if coord_stats:
+            print(f"   AI Requests: {coord_stats.get('requests_processed', 0)}")
+            print(f"   Ollama Responses: {coord_stats.get('ollama_responses', 0)}")
+            print(f"   HF Responses: {coord_stats.get('hf_responses', 0)}")
+        else:
+            print("   AI Coordination: Not active")
+    except Exception as e:
+        print(f"   Session Stats Failed: {e}")
+
+    print()
+
+    # Test Configuration Summary
+    print("5. Testing Configuration Summary:")
+    print(f"   Environment: {'HF Space' if config.is_hf_space else 'Local'}")
+    print(f"   Primary Model: {config.local_model_name or 'Not set'}")
+
+    # Feature Flags Summary
+    features = []
+    if config.use_fallback:
+        features.append("Fallback")
+    if config.hf_token:
+        features.append("HF Deep Analysis")
+    if os.getenv("TAVILY_API_KEY"):
+        features.append("Web Search")
+    if config.openweather_api_key:
+        features.append("Weather")
+
+    print(f"   Active Features: {', '.join(features) if features else 'None'}")
+
+    print()
+    print("🎉 Enhanced Debug Panel Feature Test Completed!")
+
+if __name__ == "__main__":
+    test_enhanced_debug_features()
utils/__pycache__/config.cpython-313.pyc CHANGED
Binary files a/utils/__pycache__/config.cpython-313.pyc and b/utils/__pycache__/config.cpython-313.pyc differ