arubaDev committed on
Commit
8721a61
·
verified ·
1 Parent(s): fb9cbfc

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +47 -9
app.py CHANGED
@@ -14,7 +14,6 @@ MODELS = {
14
  }
15
 
16
  DATASETS = ["The Stack", "CodeXGLUE"] # Dropdown for dataset selection
17
-
18
  HF_TOKEN = os.getenv("HF_TOKEN") # Set in your Space's Secrets
19
  DB_PATH = "history.db"
20
 
@@ -91,7 +90,8 @@ def get_messages(session_id: int):
91
  conn = db()
92
  cur = conn.cursor()
93
  cur.execute("""
94
- SELECT role, content FROM messages
 
95
  WHERE session_id = ?
96
  ORDER BY id ASC
97
  """, (session_id,))
@@ -184,6 +184,7 @@ FRONTEND_KEYWORDS = [
184
  def is_frontend_request(user_text: str) -> bool:
185
  text_lower = user_text.lower()
186
  return any(kw in text_lower for kw in FRONTEND_KEYWORDS)
 
187
  # --- Fixed send_cb to show user message ---
188
  def send_cb(user_text, selected_label, chatbot_msgs, system_message, max_tokens, temperature, top_p, model_choice, dataset_choice, *args):
189
  sid = label_to_id(selected_label)
@@ -214,6 +215,7 @@ def send_cb(user_text, selected_label, chatbot_msgs, system_message, max_tokens,
214
  client = get_client(model_choice)
215
  api_messages = build_api_messages(sid, system_message)
216
  partial = ""
 
217
  try:
218
  for chunk in client.chat_completion(
219
  messages=api_messages,
@@ -222,12 +224,23 @@ def send_cb(user_text, selected_label, chatbot_msgs, system_message, max_tokens,
222
  top_p=float(top_p),
223
  stream=True,
224
  ):
225
- delta = chunk.choices[0].delta.content or ""
 
 
 
 
 
 
 
 
 
226
  if delta:
227
  partial += delta
228
  display_msgs[-1]["content"] = partial
229
  yield (display_msgs, "", selected_label)
 
230
  add_message(sid, "assistant", partial)
 
231
  except Exception as e:
232
  display_msgs[-1]["content"] = f"⚠️ Error: {str(e)}"
233
  yield (display_msgs, "", selected_label)
@@ -241,13 +254,16 @@ def regenerate_cb(selected_label, system_message, max_tokens, temperature, top_p
241
  if not msgs:
242
  return [], ""
243
 
 
244
  if msgs and msgs[-1]["role"] == "assistant":
245
  conn = db()
246
  cur = conn.cursor()
247
  cur.execute("""
248
  DELETE FROM messages
249
  WHERE id = (
250
- SELECT id FROM messages WHERE session_id=? ORDER BY id DESC LIMIT 1
 
 
251
  )
252
  """, (sid,))
253
  conn.commit()
@@ -260,6 +276,7 @@ def regenerate_cb(selected_label, system_message, max_tokens, temperature, top_p
260
 
261
  client = get_client(model_choice)
262
  partial = ""
 
263
  try:
264
  for chunk in client.chat_completion(
265
  messages=api_messages,
@@ -268,12 +285,23 @@ def regenerate_cb(selected_label, system_message, max_tokens, temperature, top_p
268
  top_p=float(top_p),
269
  stream=True,
270
  ):
271
- delta = chunk.choices[0].delta.content or ""
 
 
 
 
 
 
 
 
 
272
  if delta:
273
  partial += delta
274
  display_msgs[-1]["content"] = partial
275
  yield display_msgs
 
276
  add_message(sid, "assistant", partial)
 
277
  except Exception as e:
278
  display_msgs[-1]["content"] = f"⚠️ Error: {str(e)}"
279
  yield display_msgs
@@ -286,14 +314,24 @@ labels, _ = list_sessions()
286
  if not labels:
287
  first_sid = create_session("New chat")
288
  labels, _ = list_sessions()
 
289
  default_selected = labels[0] if labels else None
290
 
291
  with gr.Blocks(title="Backend-Focused LLaMA/Mistral CRUD Assistant", theme=gr.themes.Soft()) as demo:
292
  gr.HTML("""
293
  <style>
294
- button { background-color: #22c55e !important; color: #ffffff !important; border: none !important; }
295
- button:hover { background-color: #16a34a !important; }
296
- button:focus { outline: 2px solid #166534 !important; outline-offset: 2px; }
 
 
 
 
 
 
 
 
 
297
  </style>
298
  """)
299
 
@@ -311,7 +349,7 @@ with gr.Blocks(title="Backend-Focused LLaMA/Mistral CRUD Assistant", theme=gr.th
311
  with gr.Row():
312
  new_btn = gr.Button("➕ New Chat", variant="primary")
313
  del_btn = gr.Button("🗑️ Delete", variant="stop")
314
- refresh_btn = gr.Button("🔄 Refresh", variant="secondary")
315
 
316
  gr.Markdown("### 🤖 Model Selection")
317
  model_choice = gr.Dropdown(
 
14
  }
15
 
16
  DATASETS = ["The Stack", "CodeXGLUE"] # Dropdown for dataset selection
 
17
  HF_TOKEN = os.getenv("HF_TOKEN") # Set in your Space's Secrets
18
  DB_PATH = "history.db"
19
 
 
90
  conn = db()
91
  cur = conn.cursor()
92
  cur.execute("""
93
+ SELECT role, content
94
+ FROM messages
95
  WHERE session_id = ?
96
  ORDER BY id ASC
97
  """, (session_id,))
 
184
  def is_frontend_request(user_text: str) -> bool:
185
  text_lower = user_text.lower()
186
  return any(kw in text_lower for kw in FRONTEND_KEYWORDS)
187
+
188
  # --- Fixed send_cb to show user message ---
189
  def send_cb(user_text, selected_label, chatbot_msgs, system_message, max_tokens, temperature, top_p, model_choice, dataset_choice, *args):
190
  sid = label_to_id(selected_label)
 
215
  client = get_client(model_choice)
216
  api_messages = build_api_messages(sid, system_message)
217
  partial = ""
218
+
219
  try:
220
  for chunk in client.chat_completion(
221
  messages=api_messages,
 
224
  top_p=float(top_p),
225
  stream=True,
226
  ):
227
+ # --- FIX: handle models that send empty chunks or use message instead of delta ---
228
+ if not hasattr(chunk, "choices") or not chunk.choices:
229
+ continue
230
+ choice = chunk.choices[0]
231
+ delta = ""
232
+ if hasattr(choice, "delta") and choice.delta and getattr(choice.delta, "content", None) is not None:
233
+ delta = choice.delta.content
234
+ elif hasattr(choice, "message") and getattr(choice.message, "content", None) is not None:
235
+ delta = choice.message.content
236
+
237
  if delta:
238
  partial += delta
239
  display_msgs[-1]["content"] = partial
240
  yield (display_msgs, "", selected_label)
241
+
242
  add_message(sid, "assistant", partial)
243
+
244
  except Exception as e:
245
  display_msgs[-1]["content"] = f"⚠️ Error: {str(e)}"
246
  yield (display_msgs, "", selected_label)
 
254
  if not msgs:
255
  return [], ""
256
 
257
+ # Remove the last assistant message if it exists (to regenerate it)
258
  if msgs and msgs[-1]["role"] == "assistant":
259
  conn = db()
260
  cur = conn.cursor()
261
  cur.execute("""
262
  DELETE FROM messages
263
  WHERE id = (
264
+ SELECT id FROM messages
265
+ WHERE session_id=?
266
+ ORDER BY id DESC LIMIT 1
267
  )
268
  """, (sid,))
269
  conn.commit()
 
276
 
277
  client = get_client(model_choice)
278
  partial = ""
279
+
280
  try:
281
  for chunk in client.chat_completion(
282
  messages=api_messages,
 
285
  top_p=float(top_p),
286
  stream=True,
287
  ):
288
+ # --- FIX: handle models that send empty chunks or use message instead of delta ---
289
+ if not hasattr(chunk, "choices") or not chunk.choices:
290
+ continue
291
+ choice = chunk.choices[0]
292
+ delta = ""
293
+ if hasattr(choice, "delta") and choice.delta and getattr(choice.delta, "content", None) is not None:
294
+ delta = choice.delta.content
295
+ elif hasattr(choice, "message") and getattr(choice.message, "content", None) is not None:
296
+ delta = choice.message.content
297
+
298
  if delta:
299
  partial += delta
300
  display_msgs[-1]["content"] = partial
301
  yield display_msgs
302
+
303
  add_message(sid, "assistant", partial)
304
+
305
  except Exception as e:
306
  display_msgs[-1]["content"] = f"⚠️ Error: {str(e)}"
307
  yield display_msgs
 
314
  if not labels:
315
  first_sid = create_session("New chat")
316
  labels, _ = list_sessions()
317
+
318
  default_selected = labels[0] if labels else None
319
 
320
  with gr.Blocks(title="Backend-Focused LLaMA/Mistral CRUD Assistant", theme=gr.themes.Soft()) as demo:
321
  gr.HTML("""
322
  <style>
323
+ button {
324
+ background-color: #22c55e !important;
325
+ color: #ffffff !important;
326
+ border: none !important;
327
+ }
328
+ button:hover {
329
+ background-color: #16a34a !important;
330
+ }
331
+ button:focus {
332
+ outline: 2px solid #166534 !important;
333
+ outline-offset: 2px;
334
+ }
335
  </style>
336
  """)
337
 
 
349
  with gr.Row():
350
  new_btn = gr.Button("➕ New Chat", variant="primary")
351
  del_btn = gr.Button("🗑️ Delete", variant="stop")
352
+ refresh_btn = gr.Button("🔄 Refresh", variant="secondary")
353
 
354
  gr.Markdown("### 🤖 Model Selection")
355
  model_choice = gr.Dropdown(