import logging
from typing import List, Dict, Optional

from core.llm_factory import llm_factory, ProviderNotAvailableError

logger = logging.getLogger(__name__)


class LLMClient:
    """High-level LLM client that uses the factory pattern with improved error handling."""

    def __init__(self):
        try:
            self.provider = llm_factory.get_provider()
        except ProviderNotAvailableError:
            # Defer the failure: generate() will raise if no provider was found
            self.provider = None
            logger.error("No LLM providers available")

    def generate(self, prompt: str, conversation_history: List[Dict], stream: bool = False) -> Optional[str]:
        """
        Generate a response with robust error handling.
        """
        if not self.provider:
            raise ProviderNotAvailableError("No LLM provider available")

        try:
            if stream:
                result = self.provider.stream_generate(prompt, conversation_history)
                # For streaming, combine chunks into a single response
                if isinstance(result, list):
                    return "".join(result)
                return result
            else:
                return self.provider.generate(prompt, conversation_history)
        except Exception as e:
            logger.error(f"LLM generation failed: {e}")
            raise  # Re-raise to let the caller handle appropriately
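

# Minimal usage sketch. Assumptions: core.llm_factory is importable and has at
# least one provider configured, and the provider accepts a conversation
# history of {"role": ..., "content": ...} dicts (the exact schema is an
# assumption, not confirmed by this module).
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    client = LLMClient()
    history = [{"role": "user", "content": "Hello, who are you?"}]
    try:
        reply = client.generate("Hello, who are you?", conversation_history=history)
        print(reply)
    except ProviderNotAvailableError:
        print("No LLM provider is available; check the factory configuration.")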