import json
import logging
import os
import sys
import time
from datetime import datetime
from pathlib import Path

import streamlit as st

# Make the app's local packages importable regardless of the working directory.
sys.path.append(str(Path(__file__).parent))

from src.ui.chat_handler import chat_handler
from utils.config import config
from core.session import session_manager
from core.memory import check_redis_health
from core.errors import translate_error
from core.personality import personality

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
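
# Page setup and session-state defaults: Streamlit reruns this script on every
# interaction, so each default is seeded into st.session_state only once.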
st.set_page_config(page_title="CosmicCat AI Assistant", page_icon="🐱", layout="wide")

if "messages" not in st.session_state:
    st.session_state.messages = []
if "is_processing" not in st.session_state:
    st.session_state.is_processing = False
if "ngrok_url_temp" not in st.session_state:
    st.session_state.ngrok_url_temp = st.session_state.get("ngrok_url", "https://7bcc180dffd1.ngrok-free.app")
if "cosmic_mode" not in st.session_state:
    st.session_state.cosmic_mode = True
if "show_welcome" not in st.session_state:
    st.session_state.show_welcome = True
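
# Sidebar: model selection, cosmic mode toggle, server configuration,
# and maintenance actions.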
with st.sidebar:
    st.title("🐱 CosmicCat AI Assistant")
    st.markdown("Your personal AI-powered assistant with a cosmic twist.")

    model_options = {
        "Mistral 7B (Local)": "mistral:latest",
        "Llama 2 7B (Local)": "llama2:latest",
        "OpenChat 3.5 (Local)": "openchat:latest",
    }
    selected_model_name = st.selectbox(
        "Select Model",
        options=list(model_options.keys()),
        index=0,
    )
    st.session_state.selected_model = model_options[selected_model_name]

    st.session_state.cosmic_mode = st.checkbox("Enable Cosmic Mode", value=st.session_state.cosmic_mode)

    st.divider()
    st.subheader("⚙️ Configuration")
    ngrok_url_input = st.text_input(
        "Ollama Server URL",
        value=st.session_state.ngrok_url_temp,
        help="Enter your ngrok URL",
    )

    if ngrok_url_input != st.session_state.ngrok_url_temp:
        st.session_state.ngrok_url_temp = ngrok_url_input
        st.success("✅ URL updated!")

    if st.button("📡 Test Connection"):
        try:
            from core.providers.ollama import OllamaProvider

            ollama_provider = OllamaProvider(st.session_state.selected_model)
            is_valid = ollama_provider.validate_model()
            if is_valid:
                st.success("✅ Connection successful!")
            else:
                st.error("❌ Model validation failed")
        except Exception as e:
            st.error(f"❌ Error: {str(e)[:50]}...")
    if st.button("🗑️ Clear History"):
        st.session_state.messages = []
        session_manager.clear_session("default_user")
        st.success("History cleared!")

    st.divider()
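
    # System status panel: Ollama, HF endpoint, and Redis health checks.
    # Each check is wrapped in try/except so a failing probe degrades to an
    # "Unknown" badge instead of crashing the app.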
    with st.expander("📊 System Status", expanded=True):
        st.subheader("📊 Status")

        try:
            from services.ollama_monitor import check_ollama_status

            ollama_status = check_ollama_status()
            if ollama_status.get("running"):
                st.success("🦙 Ollama: Running")
            else:
                st.warning("🦙 Ollama: Not running")
        except Exception:
            st.info("🦙 Ollama: Unknown")
        try:
            from src.services.hf_monitor import hf_monitor

            status_message = hf_monitor.get_human_readable_status()

            if "🟢" in status_message:
                st.success(status_message)
            elif "🟡" in status_message:
                st.warning(status_message)
            elif "🔴" in status_message or "❌" in status_message:
                st.error(status_message)
            elif "⏳" in status_message:
                st.info(status_message)
            else:
                st.info(status_message)

            if "scaled to zero" in status_message.lower():
                if st.button("⚡ Wake Up HF Endpoint", key="wake_up_hf"):
                    with st.spinner("Waking up HF endpoint... This may take 2-4 minutes..."):
                        if hf_monitor.attempt_wake_up():
                            st.success("✅ HF endpoint is waking up! Try your request again in a moment.")
                            time.sleep(2)
                            st.rerun()  # replaces the deprecated st.experimental_rerun()
                        else:
                            st.error("❌ Failed to wake up HF endpoint. Please try again.")
        except Exception as e:
            st.info(f"🤖 HF Endpoint: Error checking status - {str(e)}")
        try:
            if check_redis_health():
                st.success("💾 Redis: Connected")
            else:
                st.error("💾 Redis: Disconnected")
        except Exception:
            st.info("💾 Redis: Unknown")

    st.divider()

    st.subheader("🔍 Debug Info")
    st.markdown(f"**Environment:** {'HF Space' if config.is_hf_space else 'Local'}")
    st.markdown(f"**Model:** {st.session_state.selected_model}")
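
# Main chat area.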
st.title("🐱 CosmicCat AI Assistant")
st.markdown("Ask me anything!")

# Show the personality-driven greeting once per session.
if st.session_state.show_welcome:
    with st.chat_message("assistant"):
        greeting = personality.get_greeting(cosmic_mode=st.session_state.cosmic_mode)
        st.markdown(greeting)
    st.session_state.show_welcome = False
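
# Replay the stored conversation; assistant messages also show which
# provider produced them.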
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])
        if "timestamp" in message:
            provider_info = f" (via {message.get('provider', 'unknown')})" if message["role"] == "assistant" else ""
            st.caption(f"🕐 {message['timestamp']}{provider_info}")
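
# Hand new messages to the shared chat handler, which performs the actual
# model call and appends the exchange to st.session_state.messages.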
user_input = st.chat_input("Type your message here...", key="chat_input")

if user_input:
    chat_handler.process_user_message(user_input, selected_model_name)
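
# Footer: About tab.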
st.divider()
tab1, = st.tabs(["ℹ️ About"])

with tab1:
    st.header("ℹ️ About CosmicCat AI Assistant")
    st.markdown("""
The CosmicCat AI Assistant is a sophisticated conversational AI with a cosmic theme.

### 🔧 Core Features
- **Local AI processing** with Ollama models
- **Persistent memory** using Redis
- **Space-themed personality** for fun interactions
- **HF Endpoint integration** for advanced capabilities

### 🌌 Cosmic Mode
When enabled, the AI responds with space-themed language and metaphors.

### 🛠️ Technical Architecture
- **Primary model**: Ollama (local processing)
- **Secondary model**: HF Endpoint (advanced processing)
- **Memory system**: Redis-based session management
""")