rdune71 committed · Commit f546abb · 1 Parent(s): 9da7658

Fix HF monitor import error and improve error handling

Files changed (1)
  1. app.py +35 -47
app.py CHANGED
@@ -16,6 +16,7 @@ from core.errors import translate_error
 from core.personality import personality
 from src.analytics.user_logger import user_logger
 from src.analytics.session_analytics import session_analytics
+from src.llm.factory import llm_factory
 import logging
 
 # Set up logging
@@ -24,49 +25,25 @@ logger = logging.getLogger(__name__)
 
 st.set_page_config(page_title="CosmicCat AI Assistant", page_icon="🐱", layout="wide")
 
-# Initialize session state properly with error handling
-def initialize_session_state():
-    """Initialize all session state variables safely"""
-    session_vars = {
-        "messages": [],
-        "is_processing": False,
-        "ngrok_url_temp": st.session_state.get("ngrok_url", "https://7bcc180dffd1.ngrok-free.app"),
-        "cosmic_mode": True,
-        "show_welcome": True,
-        "last_processed_message": "",
-        "selected_model_value": "auto",
-        "session_id": f"sess_{int(time.time())}_{abs(hash(str(time.time()))) % 10000}"
-    }
-
-    for var_name, default_value in session_vars.items():
-        if var_name not in st.session_state:
-            st.session_state[var_name] = default_value
-            logger.info(f"Initialized session variable: {var_name}")
+# Initialize session state properly
+if "messages" not in st.session_state:
+    st.session_state.messages = []
+if "is_processing" not in st.session_state:
+    st.session_state.is_processing = False
+if "ngrok_url_temp" not in st.session_state:
+    st.session_state.ngrok_url_temp = st.session_state.get("ngrok_url", "https://7bcc180dffd1.ngrok-free.app")
+if "cosmic_mode" not in st.session_state:
+    st.session_state.cosmic_mode = True
+if "show_welcome" not in st.session_state:
+    st.session_state.show_welcome = True
+if "last_processed_message" not in st.session_state:
+    st.session_state.last_processed_message = ""
+if "session_id" not in st.session_state:
+    st.session_state.session_id = f"sess_{int(time.time())}_{abs(hash(str(time.time()))) % 10000}"
+if "selected_model_value" not in st.session_state:
+    st.session_state.selected_model_value = "auto"
 
-# Initialize session state
-try:
-    initialize_session_state()
-except Exception as e:
-    logger.error(f"Session state initialization failed: {e}")
-    # Fallback initialization
-    if "messages" not in st.session_state:
-        st.session_state.messages = []
-    if "is_processing" not in st.session_state:
-        st.session_state.is_processing = False
-    if "ngrok_url_temp" not in st.session_state:
-        st.session_state.ngrok_url_temp = st.session_state.get("ngrok_url", "https://7bcc180dffd1.ngrok-free.app")
-    if "cosmic_mode" not in st.session_state:
-        st.session_state.cosmic_mode = True
-    if "show_welcome" not in st.session_state:
-        st.session_state.show_welcome = True
-    if "last_processed_message" not in st.session_state:
-        st.session_state.last_processed_message = ""
-    if "selected_model_value" not in st.session_state:
-        st.session_state.selected_model_value = "auto"
-    if "session_id" not in st.session_state:
-        st.session_state.session_id = f"sess_{int(time.time())}_{abs(hash(str(time.time()))) % 10000}"
-
-# Start session tracking with error handling
+# Start session tracking
 try:
     session_analytics.start_session_tracking("default_user", st.session_state.session_id)
 except Exception as e:
@@ -99,10 +76,20 @@ with st.sidebar:
     # Show which provider will actually be used
     actual_provider = "Unknown"
     if st.session_state.selected_model_value == "auto":
-        if config.hf_token and hf_monitor.get_endpoint_status()["available"]:
-            actual_provider = "🤗 HF Endpoint"
-        elif config.ollama_host:
-            actual_provider = "🦙 Ollama"
+        # Import HF monitor here to avoid undefined error
+        try:
+            from src.services.hf_endpoint_monitor import hf_monitor
+            if config.hf_token:
+                status = hf_monitor.get_endpoint_status()
+                if status["available"]:
+                    actual_provider = "🤗 HF Endpoint"
+                elif config.ollama_host:
+                    actual_provider = "🦙 Ollama"
+            elif config.ollama_host:
+                actual_provider = "🦙 Ollama"
+        except:
+            if config.ollama_host:
+                actual_provider = "🦙 Ollama"
     else:
         actual_provider = "🤗 HF Endpoint" if st.session_state.selected_model_value == "huggingface" else "🦙 Ollama"
 
@@ -197,7 +184,7 @@ with st.sidebar:
 
     # HF Endpoint Status (Enhanced with initialization info)
     try:
-        from src.services.hf_monitor import hf_monitor
+        from src.services.hf_endpoint_monitor import hf_monitor
         status_message = hf_monitor.get_human_readable_status()
 
         # Display appropriate status icon
@@ -304,6 +291,7 @@ user_input = st.chat_input("Type your message here...", key="chat_input")
 
 # Process message when received
 if user_input and user_input.strip():
+    # Handle user message display first
     if not st.session_state.get('is_processing', False):
         chat_handler.process_user_message(user_input, st.session_state.selected_model_value)
     else:
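
For reviewers, the new "auto" branch above can be read as the following standalone sketch. This is not code from the commit: the helper name `resolve_provider` is hypothetical, and it only assumes what the diff already relies on, namely that `config` exposes `hf_token` and `ollama_host` and that `hf_monitor.get_endpoint_status()` returns a dict with an `"available"` key.

```python
# Illustrative sketch of the fallback order implemented by the new "auto" branch
# in app.py. resolve_provider is a hypothetical name; config and hf_monitor mirror
# the objects used in app.py.
def resolve_provider(config) -> str:
    try:
        # Lazy import so a missing or broken monitor module cannot break the sidebar.
        from src.services.hf_endpoint_monitor import hf_monitor
        if config.hf_token:
            status = hf_monitor.get_endpoint_status()  # expected to contain "available"
            if status["available"]:
                return "🤗 HF Endpoint"
            if config.ollama_host:
                return "🦙 Ollama"
        elif config.ollama_host:
            return "🦙 Ollama"
    except Exception:
        # Any monitor failure falls back to Ollama when it is configured.
        if config.ollama_host:
            return "🦙 Ollama"
    return "Unknown"
```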