jocko committed
Commit 97f12dc · 1 Parent(s): 0a89a37
Files changed (2)
  1. requirements.txt +4 -1
  2. src/streamlit_app.py +17 -8
requirements.txt CHANGED
@@ -5,4 +5,7 @@ torch
 transformers
 sentence-transformers
 datasets
-openai
+openai
+opik
+comet-llm==2.1.0
+comet_ml==3.33.8
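Note: the newly introduced packages are Comet's LLM-observability stack: opik supplies the @track tracing decorator used in src/streamlit_app.py below, while comet-llm and comet_ml are the prompt-logging and core Comet SDKs (pinned here to 2.1.0 and 3.33.8). A minimal sketch of initializing Opik from environment variables, assuming the same OPIK_API_KEY / OPIK_WORKSPACE names the app reads and that opik.configure accepts api_key/workspace keyword arguments as in current Opik releases:

import os
import opik

# Assumption: the deployment environment defines these variables,
# matching what src/streamlit_app.py expects below.
opik.configure(
    api_key=os.getenv("OPIK_API_KEY"),
    workspace=os.getenv("OPIK_WORKSPACE"),
)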
src/streamlit_app.py CHANGED
@@ -27,10 +27,14 @@ from transformers import CLIPProcessor, CLIPModel
 from datasets import load_dataset, get_dataset_split_names
 from PIL import Image
 import openai
+import comet_llm
+from opik import track
+
 
 # ========== 🔑 API Key ==========
 openai.api_key = os.getenv("OPENAI_API_KEY")
-
+os.environ["OPIK_API_KEY"] = os.getenv("OPIK_API_KEY")
+os.environ["OPIK_WORKSPACE"] = os.getenv("OPIK_WORKSPACE")
 # ========== 📥 Load Models ==========
 @st.cache_resource(show_spinner=False)
 def load_models():
@@ -89,6 +93,16 @@ st.title("🩺 Multimodal Medical Chatbot")
 
 query = st.text_input("Enter your medical question or symptom description:")
 
+@track
+def get_chat_completion_openai(prompt: str):
+    return client.chat.completions.create(
+        model="gpt-4o",  # or "gpt-4" if you need the older GPT-4
+        messages=[{"role": "user", "content": prompt}],
+        temperature=0.5,
+        max_tokens=150
+    )
+
+
 if query:
     with st.spinner("Searching medical cases..."):
         text_embeddings = embed_texts(data[TEXT_COLUMN])
@@ -112,13 +126,8 @@ if query:
     from openai import OpenAI
     client = OpenAI(api_key=openai.api_key)
 
-    response = client.chat.completions.create(
-        model="gpt-4o",  # or "gpt-4" if you need the older GPT-4
-        messages=[{"role": "user", "content": prompt}],
-        temperature=0.5,
-        max_tokens=150
-    )
-    explanation = response.choices[0].message.content
+    response = get_chat_completion_openai(prompt)
+    explanation = response.choices[0].message.content
 
     st.markdown(f"### 🤖 Explanation by GPT:\n{explanation}")
 else:
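For reference, a self-contained sketch of the pattern this commit introduces: an Opik @track-decorated helper wrapping the OpenAI chat completion, so each GPT call is recorded as a trace. The model, temperature, and max_tokens values mirror the diff; the API keys and the example prompt are placeholders, and this is an illustrative sketch rather than the app's exact code:

import os

from openai import OpenAI
from opik import track

# Assumes OPENAI_API_KEY (and OPIK_API_KEY / OPIK_WORKSPACE for Opik) are set.
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

@track
def get_chat_completion_openai(prompt: str):
    # Opik records the inputs and output of this call as a trace.
    return client.chat.completions.create(
        model="gpt-4o",
        messages=[{"role": "user", "content": prompt}],
        temperature=0.5,
        max_tokens=150,
    )

if __name__ == "__main__":
    # Hypothetical example prompt, for illustration only.
    response = get_chat_completion_openai("Summarize common symptoms of seasonal allergies.")
    print(response.choices[0].message.content)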