AI-Life-Coach-Streamlit2 / src /ui /chat_handler.py
rdune71's picture
Implement enhanced UI with proper response display and feedback
a3e0ade
raw
history blame
5.62 kB
import streamlit as st
import time
import logging
from typing import Optional
from src.llm.factory import llm_factory, ProviderNotAvailableError
from src.services.hf_monitor import hf_monitor
from core.session import session_manager
logger = logging.getLogger(__name__)
class ChatHandler:
    """Handles chat interactions with better UI feedback.

    Renders the user message immediately, shows provider/progress status in
    the assistant bubble, and records both sides of the exchange in
    ``st.session_state.messages`` and the backend session store.
    """

    def __init__(self):
        # Re-entrancy guard for a single handler instance; prevents a second
        # submit while a response is still being generated.
        self.is_processing = False

    def process_user_message(self, user_input: str, selected_model: str) -> None:
        """Process a user message with enhanced UI feedback.

        Args:
            user_input: Raw text entered by the user; blank input is rejected.
            selected_model: UI label of the chosen model (mapped to a provider).
        """
        if not user_input or not user_input.strip():
            st.warning("Please enter a message")
            return
        if self.is_processing:
            st.warning("Still processing previous request...")
            return

        self.is_processing = True
        try:
            # Guard: make sure the history list exists even if the main app
            # hasn't initialized session state yet (was an AttributeError).
            st.session_state.setdefault("messages", [])

            # Show the user message immediately, with a send timestamp.
            timestamp = time.strftime("%H:%M:%S")
            with st.chat_message("user"):
                st.markdown(user_input)
                st.caption(f"πŸ•’ {timestamp}")

            # Record the user turn in the UI-facing history.
            st.session_state.messages.append({
                "role": "user",
                "content": user_input,
                "timestamp": timestamp
            })

            # Assistant bubble: a status line plus a slot for the reply.
            with st.chat_message("assistant"):
                status_placeholder = st.empty()
                response_placeholder = st.empty()
                try:
                    provider_name = self._get_provider_for_model(selected_model)
                    status_placeholder.info(
                        f"πŸš€ Contacting {self._get_provider_display_name(provider_name)}..."
                    )

                    response = self._get_ai_response(user_input, provider_name)
                    if response:
                        status_placeholder.success("βœ… Response received!")
                        response_placeholder.markdown(response)
                        # Record the assistant turn, tagged with its provider.
                        st.session_state.messages.append({
                            "role": "assistant",
                            "content": response,
                            "timestamp": time.strftime("%H:%M:%S"),
                            "provider": provider_name
                        })
                    else:
                        status_placeholder.error("❌ Empty response received")
                        response_placeholder.markdown(
                            "I received your message but couldn't generate a proper response."
                        )
                except ProviderNotAvailableError as e:
                    status_placeholder.error("❌ No AI providers available")
                    response_placeholder.markdown(
                        "No AI providers are configured. Please check your settings."
                    )
                    logger.error(f"Provider not available: {e}")
                except Exception:
                    # Surface a truncated message in the UI; keep the full
                    # traceback in the logs (logger.exception, not error).
                    import sys
                    err_text = str(sys.exc_info()[1])
                    status_placeholder.error(f"❌ Error: {err_text[:100]}...")
                    response_placeholder.markdown(
                        f"Sorry, I encountered an error: {err_text[:100]}..."
                    )
                    logger.exception("Chat processing error")
        finally:
            self.is_processing = False
            time.sleep(0.1)  # Small delay to ensure UI updates

    def _get_provider_for_model(self, selected_model: str) -> str:
        """Map a UI model label to its provider key.

        Unknown labels fall back to "ollama", matching the only provider the
        current label set uses.
        """
        model_map = {
            "Mistral 7B (Local)": "ollama",
            "Llama 2 7B (Local)": "ollama",
            "OpenChat 3.5 (Local)": "ollama"
        }
        return model_map.get(selected_model, "ollama")

    def _get_provider_display_name(self, provider_name: str) -> str:
        """Return the human-friendly display name for a provider key.

        Unknown keys are echoed back unchanged.
        """
        display_names = {
            "ollama": "πŸ¦™ Ollama",
            "huggingface": "πŸ€— HF Endpoint"
        }
        return display_names.get(provider_name, provider_name)

    def _get_ai_response(self, user_input: str, provider_name: str) -> Optional[str]:
        """Generate an AI response and persist the exchange in the session.

        Builds the prompt history once and reuses the same list for
        persistence (the original constructed the history twice).

        Returns:
            The assistant reply, or None/empty if the provider produced none.

        Raises:
            Re-raises any provider error after logging it.
        """
        try:
            user_session = session_manager.get_session("default_user")
            # One copy serves both the prompt and the persisted history.
            conversation = user_session.get("conversation", []).copy()
            conversation.append({"role": "user", "content": user_input})

            provider = llm_factory.get_provider(provider_name)
            response = provider.generate(user_input, conversation)

            if response:
                conversation.append({"role": "assistant", "content": response})
                session_manager.update_session("default_user", {"conversation": conversation})
            return response
        except Exception as e:
            logger.error(f"AI response generation failed: {e}")
            raise
# Global instance
# NOTE(review): module-level singleton, so one instance (and its
# `is_processing` flag) is shared by every session served from this
# process — confirm this is intended for multi-user deployments.
chat_handler = ChatHandler()