arubaDev committed on
Commit b08e1ce · verified · 1 Parent(s): 225355f

Update app.py

Files changed (1)
  1. app.py +9 -73
app.py CHANGED
@@ -3,10 +3,6 @@ import sqlite3
 from datetime import datetime
 import gradio as gr
 from huggingface_hub import InferenceClient
-from datasets import load_dataset
-from sentence_transformers import SentenceTransformer
-from sklearn.metrics.pairwise import cosine_similarity
-import numpy as np
 
 # ---------------------------
 # Config
@@ -14,8 +10,9 @@ import numpy as np
 MODELS = {
     "Meta LLaMA 3.1 (8B Instruct)": "meta-llama/Llama-3.1-8B-Instruct",
     "Mistral 7B Instruct": "mistralai/Mistral-7B-Instruct-v0.3",
-    # Add your backend-focused fine-tuned model here if available
-    # "Backend-Finetuned Model": "your-username/backend-crud-model"
+    "The Stack (Multilingual)": "bigcode/the-stack",
+    "XLCoST (Multilingual)": "reddy-lab-code-research/XLCoST",
+    "CoMA (Multilingual)": "Denilah/CoMA"
 }
 
 HF_TOKEN = os.getenv("HF_TOKEN") # Set in your Space's Secrets
@@ -26,9 +23,7 @@ SYSTEM_DEFAULT = (
     "Always prioritize database, API, authentication, routing, migrations, and CRUD logic. "
     "Provide full backend code scaffolds with files, paths, and commands. "
     "Only include frontend if required for framework integration "
-    "(e.g., Laravel Blade, Django templates). Ignore other frontend/UI tasks. "
-    "If user asks for excessive frontend work, politely respond: "
-    "'I am a backend assistant and focus only on backend tasks.'"
+    "(e.g., Laravel Blade, Django templates). Ignore other frontend/UI tasks."
 )
 
 # ---------------------------
@@ -126,23 +121,6 @@ def update_session_title_if_needed(session_id: int, first_user_text: str):
     conn.commit()
     conn.close()
 
-# ---------------------------
-# Dataset & Embeddings Setup
-# ---------------------------
-print("Loading dataset and embeddings... (this runs only once)")
-dataset = load_dataset("codeparrot/codeparrot-clean-python", split="train[:5%]") # small % for speed
-backend_snippets = [d["content"] for d in dataset if any(k in d["content"].lower() for k in
-                    ["db", "database", "api", "crud", "auth", "routing", "migration"])]
-
-embed_model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
-snippet_embeddings = embed_model.encode(backend_snippets, convert_to_numpy=True)
-
-def get_relevant_snippets(user_text, top_k=3):
-    user_emb = embed_model.encode([user_text], convert_to_numpy=True)
-    sims = cosine_similarity(user_emb, snippet_embeddings)[0]
-    top_indices = np.argsort(sims)[-top_k:][::-1]
-    return "\n\n".join([backend_snippets[i] for i in top_indices])
-
 # ---------------------------
 # Helpers
 # ---------------------------
@@ -154,11 +132,9 @@ def label_to_id(label: str | None) -> int | None:
     except Exception:
         return None
 
-def build_api_messages(session_id: int, system_message: str, user_text: str):
-    relevant_snippets = get_relevant_snippets(user_text)
+def build_api_messages(session_id: int, system_message: str):
     msgs = [{"role": "system", "content": system_message.strip()}]
     msgs.extend(get_messages(session_id))
-    msgs.append({"role": "user", "content": relevant_snippets + "\n\n" + user_text})
     return msgs
 
 def get_client(model_choice: str):
@@ -203,7 +179,7 @@ def send_cb(user_text, selected_label, chatbot_msgs, system_message, max_tokens,
     add_message(sid, "user", user_text)
     update_session_title_if_needed(sid, user_text)
 
-    api_messages = build_api_messages(sid, system_message, user_text)
+    api_messages = build_api_messages(sid, system_message)
     display_msgs = get_messages(sid)
     display_msgs.append({"role": "assistant", "content": ""})
 
@@ -325,46 +301,6 @@ with gr.Blocks(title="Backend-Focused LLaMA/Mistral CRUD Assistant", theme=gr.th
                 interactive=True
             )
 
-            gr.Markdown("### ⚙️ Generation Settings")
-            system_box = gr.Textbox(
-                value=SYSTEM_DEFAULT,
-                label="System message",
-                lines=4
-            )
-            max_tokens = gr.Slider(256, 4096, value=1200, step=16, label="Max tokens")
-            temperature = gr.Slider(0.0, 2.0, value=0.25, step=0.05, label="Temperature")
-            top_p = gr.Slider(0.1, 1.0, value=0.9, step=0.05, label="Top-p")
-
-        with gr.Column(scale=3):
-            chatbot = gr.Chatbot(label="Assistant", height=520, type="messages")
-            with gr.Row():
-                user_box = gr.Textbox(placeholder="Describe your CRUD/backend task…", lines=3, scale=5)
-            with gr.Row():
-                send_btn = gr.Button("Send ▶️", variant="primary")
-                regen_btn = gr.Button("Regenerate 🔁", variant="secondary")
-
-    refresh_btn.click(refresh_sessions_cb, outputs=session_list)
-    new_btn.click(new_chat_cb, outputs=[session_list, chatbot, user_box])
-    del_btn.click(delete_chat_cb, inputs=session_list, outputs=[session_list, chatbot])
-    session_list.change(load_session_cb, inputs=session_list, outputs=chatbot)
-
-    send_btn.click(
-        send_cb,
-        inputs=[user_box, session_list, chatbot, system_box, max_tokens, temperature, top_p, model_choice],
-        outputs=[chatbot, user_box, session_list]
-    )
-
-    user_box.submit(
-        send_cb,
-        inputs=[user_box, session_list, chatbot, system_box, max_tokens, temperature, top_p, model_choice],
-        outputs=[chatbot, user_box, session_list]
-    )
-
-    regen_btn.click(
-        regenerate_cb,
-        inputs=[session_list, system_box, max_tokens, temperature, top_p, model_choice],
-        outputs=chatbot
-    )
-
-if __name__ == "__main__":
-    demo.launch()
+            gr.Markdown("###
+            ::contentReference[oaicite:0]{index=0}
+
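After this commit, the request path no longer retrieves dataset snippets: `build_api_messages` only concatenates the system prompt with the stored session history, and the user turn is persisted through `add_message` before the model call. The sketch below illustrates that flow in isolation; it is not code from this commit. The SQLite-backed helpers (`get_messages`, `add_message`) are replaced with hypothetical in-memory stand-ins, and the streaming `chat_completion` call is an assumption about the unchanged part of `app.py`.

```python
# Minimal sketch of the simplified message-building flow after this change.
# The helpers below are hypothetical stand-ins, not the committed implementation.
import os
from huggingface_hub import InferenceClient

_history: list[dict] = []  # stand-in for the SQLite messages table of one session

def get_messages(session_id: int) -> list[dict]:
    # app.py reads these rows from SQLite; here they live in memory.
    return list(_history)

def add_message(session_id: int, role: str, content: str) -> None:
    _history.append({"role": role, "content": content})

def build_api_messages(session_id: int, system_message: str) -> list[dict]:
    # Same shape as the committed version: system prompt + stored history,
    # with no retrieved snippets prepended to the user turn any more.
    msgs = [{"role": "system", "content": system_message.strip()}]
    msgs.extend(get_messages(session_id))
    return msgs

if __name__ == "__main__":
    sid = 1
    add_message(sid, "user", "Scaffold a FastAPI CRUD service for a 'tasks' table.")
    api_messages = build_api_messages(sid, "You are a backend-focused assistant.")

    # Assumed call shape for the unchanged streaming code path (needs HF_TOKEN).
    client = InferenceClient("meta-llama/Llama-3.1-8B-Instruct", token=os.getenv("HF_TOKEN"))
    for chunk in client.chat_completion(api_messages, max_tokens=512, stream=True):
        print(chunk.choices[0].delta.content or "", end="")
```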