rdune71 committed
Commit 5420da2 · 1 Parent(s): 7b5f176

Add conversation history display and finalize Redis integration

Files changed (3):
  1. api/chat.py     +1 -1
  2. app.py          +18 -1
  3. core/memory.py  +29 -0
api/chat.py CHANGED
@@ -1,8 +1,8 @@
+import json
 from fastapi import APIRouter, HTTPException
 from fastapi.responses import StreamingResponse
 from core.llm import LLMClient
 from core.memory import save_user_state, load_user_state
-import json
 
 router = APIRouter()
 
app.py CHANGED
@@ -1,6 +1,8 @@
 import streamlit as st
 from utils.config import config
 import requests
+import json
+from core.memory import load_user_state
 
 # Set page config
 st.set_page_config(page_title="AI Life Coach", page_icon="🧘", layout="centered")
@@ -19,6 +21,13 @@ def get_ollama_status():
     except Exception:
         return {"running": False, "model_loaded": None}
 
+# After user selects name, load conversation history
+def get_conversation_history(user_id):
+    user_state = load_user_state(user_id)
+    if user_state and "conversation" in user_state:
+        return json.loads(user_state["conversation"])
+    return []
+
 ollama_status = get_ollama_status()
 
 # Display Ollama status
@@ -34,6 +43,13 @@ st.markdown("Talk to your personal development assistant.")
 if not ollama_status["running"]:
     st.warning("⚠️ Ollama is not running. Please start Ollama to use the AI Life Coach.")
 else:
+    # Display conversation history
+    conversation = get_conversation_history(user)
+    for msg in conversation:
+        role = msg["role"].capitalize()
+        content = msg["content"]
+        st.markdown(f"**{role}:** {content}")
+
     # Chat input
     user_input = st.text_input("Your message...", key="input")
     if st.button("Send"):
@@ -51,7 +67,8 @@ else:
             json={"user_id": user, "message": user_input}
         )
         if response.status_code == 200:
-            ai_response = response.text
+            response_data = response.json()
+            ai_response = response_data.get("response", "")
             st.markdown(f"**AI Coach:** {ai_response}")
         else:
             st.error("Failed to get response from AI Coach.")
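
Note: the new display code above relies on two data shapes that are not visible in this diff. The Redis hash field "conversation" is expected to hold a JSON-encoded list of {"role", "content"} messages, and the chat endpoint is expected to return a JSON body with a "response" key (app.py now calls response.json().get("response", "")). A minimal sketch of what the backend would need to persist so the UI finds history; the append_message helper is illustrative, not code from this commit:

import json
from core.memory import save_user_state, load_user_state

def append_message(user_id: str, role: str, content: str) -> None:
    # Illustrative helper (assumption, not part of this commit):
    # append one message to the JSON-encoded "conversation" hash field
    # that get_conversation_history() in app.py reads back.
    state = load_user_state(user_id) or {}
    conversation = json.loads(state.get("conversation", "[]"))
    conversation.append({"role": role, "content": content})
    save_user_state(user_id, {"conversation": json.dumps(conversation)})
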
core/memory.py CHANGED
@@ -0,0 +1,29 @@
+import json
+import redis
+from utils.config import config
+
+# Initialize Redis connection
+redis_client = redis.Redis(
+    host=config.redis_host,
+    port=config.redis_port,
+    username=config.redis_username,
+    password=config.redis_password,
+    decode_responses=True
+)
+
+def save_user_state(user_id: str, state: dict):
+    """Save user state to Redis"""
+    try:
+        redis_client.hset(f"user:{user_id}", mapping=state)
+        return True
+    except Exception as e:
+        print(f"Error saving user state: {e}")
+        return False
+
+def load_user_state(user_id: str):
+    """Load user state from Redis"""
+    try:
+        return redis_client.hgetall(f"user:{user_id}")
+    except Exception as e:
+        print(f"Error loading user state: {e}")
+        return {}
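
Because hset(..., mapping=...) only accepts flat string/number values and hgetall returns decoded strings (decode_responses=True), nested data such as a conversation list has to be JSON-serialized before saving. A minimal usage sketch of the two helpers added here; the user id and fields are illustrative:

import json
from core.memory import save_user_state, load_user_state

messages = [
    {"role": "user", "content": "Hello"},
    {"role": "assistant", "content": "Hi! How can I help today?"},
]

# Redis hashes store flat string fields, so the list is JSON-encoded first.
save_user_state("demo_user", {
    "name": "demo_user",
    "conversation": json.dumps(messages),
})

state = load_user_state("demo_user")                   # dict of decoded strings
history = json.loads(state.get("conversation", "[]"))
print(history[0]["content"])                           # -> "Hello"
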