Commit f5423bc · Parent(s): b95b076
Upd fallbacks

Files changed:
- api/chatbot.py +18 -2
- api/config.py +4 -1
- memory/memory.py +9 -1
- search/__init__.py +9 -9
- search/engines/__init__.py +3 -3
- search/processors/__init__.py +2 -2
api/chatbot.py
CHANGED
@@ -15,16 +15,32 @@ class GeminiClient:
     """Gemini API client for generating responses"""
 
     def __init__(self):
-        self.client = genai.Client(api_key=gemini_flash_api_key)
+        if not gemini_flash_api_key:
+            logger.warning("FlashAPI not set - Gemini client will use fallback responses")
+            self.client = None
+        else:
+            self.client = genai.Client(api_key=gemini_flash_api_key)
 
     def generate_content(self, prompt: str, model: str = "gemini-2.5-flash", temperature: float = 0.7) -> str:
         """Generate content using Gemini API"""
+        if not self.client:
+            return self._generate_fallback_response(prompt)
+
         try:
             response = self.client.models.generate_content(model=model, contents=prompt)
             return response.text
         except Exception as e:
             logger.error(f"[LLM] ❌ Error calling Gemini API: {e}")
-            return
+            return self._generate_fallback_response(prompt)
+
+    def _generate_fallback_response(self, prompt: str) -> str:
+        """Generate a simple fallback response when Gemini API is not available"""
+        # Extract the user's cooking question from the prompt
+        if "User's cooking question:" in prompt:
+            question_part = prompt.split("User's cooking question:")[-1].split("\n")[0].strip()
+            return f"I'd be happy to help you with your cooking question: '{question_part}'. However, I'm currently unable to access my full cooking knowledge base. Please try again later or contact support."
+        else:
+            return "I'm a cooking tutor, but I'm currently unable to access my full knowledge base. Please try again later."
 
 class CookingTutorChatbot:
     """Cooking tutor chatbot that uses only web search + memory."""
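In practice, the new branches mean GeminiClient can be constructed and queried even when FlashAPI is unset. A minimal sketch of that path, assuming only the api.chatbot module path and prompt format shown in this diff (everything else is illustrative):

import os

os.environ.pop("FlashAPI", None)        # simulate a missing key before the module reads it

from api.chatbot import GeminiClient    # module path taken from this commit's file layout

client = GeminiClient()                 # logs the warning and leaves self.client = None
reply = client.generate_content("User's cooking question: How do I temper chocolate?")
print(reply)                            # canned fallback that echoes the extracted question

With the key set, the same call goes through client.models.generate_content exactly as before.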
api/config.py
CHANGED
@@ -4,13 +4,16 @@ import logging
 import psutil
 from typing import List
 
+logger = logging.getLogger(__name__)
+
 # ✅ Environment Variables
 gemini_flash_api_key = os.getenv("FlashAPI")
 
 # Validate environment endpoint (only when actually running the app)
 def validate_environment():
     if not gemini_flash_api_key:
-
+        logger.warning("⚠️ FlashAPI key not set - cooking tutor will use fallback responses")
+        # Don't raise error, just warn - app can run with fallback responses
 
 # ✅ Logging Configuration
 def setup_logging():
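With this change a missing key no longer has to abort startup: validate_environment() only logs a warning. A quick sketch of the new behaviour (module path assumed from the repo layout; run without FlashAPI in the environment):

import logging
import os

logging.basicConfig(level=logging.WARNING)
os.environ.pop("FlashAPI", None)         # make sure the key is absent

from api import config                   # path assumed from this repo's layout

config.validate_environment()            # logs '⚠️ FlashAPI key not set ...' and returns None instead of raising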
memory/memory.py
CHANGED
@@ -16,7 +16,15 @@ logger = logging.getLogger("rag-agent")
 logging.basicConfig(level=logging.INFO, format="%(asctime)s — %(name)s — %(levelname)s — %(message)s", force=True) # Change INFO to DEBUG for full-ctx JSON loader
 
 api_key = os.getenv("FlashAPI")
-client = genai.Client(api_key=api_key)
+# Initialize client lazily to avoid errors during import
+client = None
+
+def get_genai_client():
+    """Get or create Gemini client"""
+    global client
+    if client is None and api_key:
+        client = genai.Client(api_key=api_key)
+    return client
 
 class MemoryManager:
     def __init__(self, max_users=1000, history_per_user=20, max_chunks=60):
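Callers now fetch the client through get_genai_client() instead of relying on one built at import time, so the module imports cleanly without the key. A sketch of a call site; the summarize() helper and its truncation fallback are hypothetical and not part of this commit, while the generate_content call mirrors api/chatbot.py:

from memory.memory import get_genai_client   # module path taken from this commit

def summarize(text: str) -> str:
    client = get_genai_client()
    if client is None:                       # FlashAPI unset: no Gemini client available
        return text[:200]                    # hypothetical cheap fallback, not part of the commit
    response = client.models.generate_content(model="gemini-2.5-flash", contents=f"Summarize briefly: {text}")
    return response.text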
search/__init__.py
CHANGED
@@ -1,25 +1,25 @@
-# Search package
-from .search import WebSearcher, search_web, search_web_with_content,
+# Search package - Cooking Tutor
+from .search import WebSearcher, search_web, search_web_with_content, search_cooking, search_multilingual_cooking, search_videos, search_comprehensive
 from .coordinator import SearchCoordinator
-from .engines import DuckDuckGoEngine,
+from .engines import DuckDuckGoEngine, CookingSearchEngine, MultilingualCookingEngine, VideoSearchEngine
 from .extractors import ContentExtractor
-from .processors import
+from .processors import CookingSearchProcessor, LanguageProcessor, SourceAggregator, EnhancedContentProcessor
 
 __all__ = [
     'WebSearcher',
     'search_web',
     'search_web_with_content',
-    '
-    '
+    'search_cooking',
+    'search_multilingual_cooking',
     'search_videos',
     'search_comprehensive',
     'SearchCoordinator',
     'DuckDuckGoEngine',
-    '
-    '
+    'CookingSearchEngine',
+    'MultilingualCookingEngine',
     'VideoSearchEngine',
     'ContentExtractor',
-    '
+    'CookingSearchProcessor',
     'LanguageProcessor',
     'SourceAggregator',
     'EnhancedContentProcessor'
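The cooking-focused helpers are now re-exported from the package root, so callers can import them directly from search. A sketch of the intended usage; the call signature of search_cooking is not shown in this commit, so the single-string argument below is a guess:

from search import search_cooking, SearchCoordinator    # names exported by this commit

results = search_cooking("how to proof pizza dough")     # argument shape assumed, not confirmed here
print(results)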
search/engines/__init__.py
CHANGED
@@ -1,6 +1,6 @@
 from .duckduckgo import DuckDuckGoEngine
-from .
-from .multilingual import
+from .cooking import CookingSearchEngine
+from .multilingual import MultilingualCookingEngine
 from .video import VideoSearchEngine
 
-__all__ = ['DuckDuckGoEngine', '
+__all__ = ['DuckDuckGoEngine', 'CookingSearchEngine', 'MultilingualCookingEngine', 'VideoSearchEngine']
search/processors/__init__.py
CHANGED
@@ -1,6 +1,6 @@
-from .
+from .cooking import CookingSearchProcessor
 from .language import LanguageProcessor
 from .sources import SourceAggregator
 from .enhanced import EnhancedContentProcessor
 
-__all__ = ['
+__all__ = ['CookingSearchProcessor', 'LanguageProcessor', 'SourceAggregator', 'EnhancedContentProcessor']
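Because the subpackages and the package root now advertise the same cooking-related names, a small consistency check (purely illustrative) confirms the re-exports line up after this commit:

import search
import search.engines as engines
import search.processors as processors

# Every name exported by the subpackages should also appear at the package root.
missing = [name for name in engines.__all__ + processors.__all__ if name not in search.__all__]
print(missing)   # expected: []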