# eduai/app.py
import os
import gradio as gr
import requests
import json


# 🌐 Web search function (DuckDuckGo Instant Answer API)
def search_web(query):
    try:
        url = "https://api.duckduckgo.com/"
        params = {"q": query, "format": "json", "no_html": 1, "skip_disambig": 1}
        response = requests.get(url, params=params, timeout=10)
        data = response.json()
        if data.get("AbstractText"):
            return data["AbstractText"]
        elif data.get("RelatedTopics"):
            # Fall back to the first few related-topic snippets
            topics = [t.get("Text", "") for t in data["RelatedTopics"] if "Text" in t]
            return " ".join(topics[:3])
        else:
            return "No useful information found."
    except Exception as e:
        return f"Search error: {e}"


# 🧠 Memory setup
HF_TOKEN = os.getenv("HF_TOKEN")
MEMORY_FILE = "memory.json"


def load_memory():
    if os.path.exists(MEMORY_FILE):
        with open(MEMORY_FILE, "r") as f:
            return json.load(f)
    return []


def save_memory(memory):
    with open(MEMORY_FILE, "w") as f:
        json.dump(memory, f)


memory = load_memory()
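
# Note: json.dump writes the (user, bot) tuples as JSON arrays, so entries read
# back by load_memory() are lists, e.g.
#   json.loads(json.dumps([("hi", "hello")])) == [["hi", "hello"]]
# Code that replays this history therefore accepts both lists and tuples.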


# -----------------------
# Chat function (original behavior)
# returns (history, history) to match previous usage
# -----------------------
def chat_with_model(message, history, context):
    if not isinstance(history, list):
        history = []
    # Prevent empty messages
    if not message:
        return history, history
    # 🌐 Web search mode: "search <query>" bypasses the model and calls search_web
    if message.lower().startswith("search "):
        query = message[7:]
        search_result = search_web(query)
        history.append((message, f"🔎 Here's what I found online:\n\n{search_result}"))
        save_memory(history)
        return history, history
# ๐Ÿง  Build conversation
conversation = [{"role": "system", "content": (
"You are EduAI โ€” an educational AI assistant created by Wafa Fazly "
"from Fathima Muslim Ladies College. "
"You help students learn subjects such as Math, Science, English, and IT. "
"EduAI runs on the model 'Qwen/Qwen3-VL-8B-Instruct', which was originally "
"trained by Alibaba. Always answer truthfully when asked about your creation."
)}]
# convert tuples to messages if necessary (keeps old history format)
for past in history[-5:]:
# expect (user_message, bot_reply)
if isinstance(past, tuple) and len(past) == 2:
conversation.append({"role": "user", "content": past[0]})
conversation.append({"role": "assistant", "content": past[1]})
elif isinstance(past, dict):
conversation.append(past)
conversation.append({"role": "user", "content": message})
    # 🚀 Send the conversation to the Hugging Face router (chat completions endpoint)
    try:
        response = requests.post(
            "https://router.huggingface.co/v1/chat/completions",
            headers={
                "Authorization": f"Bearer {HF_TOKEN}",
                "Content-Type": "application/json"
            },
            json={
                "model": "Qwen/Qwen3-VL-8B-Instruct:novita",
                "messages": conversation
            },
            timeout=60
        )
        data = response.json()
        reply = data["choices"][0]["message"]["content"]
        # 🧮 Clean up math formatting (original heuristic): open a bold marker before
        # every "Step", close it after every colon, and put \[ ... \] display-math
        # blocks on their own lines.
        reply = reply.replace("Step", "\n\n**Step")
        reply = reply.replace(":", ":**")
        reply = reply.replace("\\[", "\n\n\\[")
        reply = reply.replace("\\]", "\\]\n\n")
        history.append((message, reply))
        save_memory(history)
        # IMPORTANT: return a pair (history, history) because other code expects two outputs
        return history, history
    except Exception as e:
        print("Error:", e)
        history.append((message, "😅 EduAI is having trouble connecting right now. Please try again later!"))
        return history, history
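
# Note on the router call above: it uses the OpenAI-compatible chat completions
# format, so a successful response is expected to look roughly like
#   {"choices": [{"message": {"role": "assistant", "content": "..."}}], ...}
# (other fields omitted); anything else falls through to the except branch.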


# 📘 Sidebar context update
def update_context(choice):
    if not choice:
        return "📘 **You are in General Mode.** Ask EduAI anything about your studies!"
    return f"📘 **You selected {choice} mode.** Ask anything related to this topic!"


# 🧹 Clear chat memory
def clear_memory():
    if os.path.exists(MEMORY_FILE):
        os.remove(MEMORY_FILE)
    return [], "🧹 Chat memory cleared! Start fresh."


# -----------------------
# Pause / Send wrappers (FIXED)
# -----------------------
# Send handler that respects the paused state (ALWAYS returns (chat_history, textbox_clear))
def send_handler(message, history, context, paused_state):
    if paused_state:
        # Do not call the model when paused; just append a friendly hint
        if not isinstance(history, list):
            history = []
        history.append((None, "⏸️ Chat is paused. Click Resume to continue."))
        return history, ""  # update chatbot and clear textbox
    # Not paused: call the original chat handler and adapt its returns to (chat_history, textbox_clear)
    hist_pair = chat_with_model(message, history, context)  # returns (history, history)
    # Unpack safely
    if isinstance(hist_pair, tuple) and len(hist_pair) == 2:
        hist = hist_pair[0]
    else:
        hist = hist_pair
    return hist, ""
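
# In the tuple-based Chatbot format, a (None, text) pair renders as a bot-only
# bubble with no user message, which is how the pause/resume notices above and
# below are displayed.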


# Toggle pause/resume and update the UI (state + chat + button text + send button enabled/disabled)
def toggle_pause(paused_state, history):
    new_state = not bool(paused_state)
    if not isinstance(history, list):
        history = []
    if new_state:
        # Now paused
        history.append((None, "⏸️ Chat paused. Send is disabled."))
        pause_btn_update = gr.update(value="▶ Resume")
        send_btn_update = gr.update(interactive=False)
    else:
        # Resumed
        history.append((None, "▶️ Chat resumed. You can send messages now."))
        pause_btn_update = gr.update(value="⏸ Pause")
        send_btn_update = gr.update(interactive=True)
    # Return the new pause state, the updated chat history, and two UI updates (pause button & send button)
    return new_state, history, pause_btn_update, send_btn_update
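
# gr.update(...) only changes the properties passed to it and works across Gradio
# versions (the per-component .update() helpers were removed in Gradio 4);
# `interactive=False` is the supported way to grey out the Send button.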


# -----------------------
# Build UI (unchanged layout; pause added)
# -----------------------
with gr.Blocks(theme=gr.themes.Soft(primary_hue="violet")) as iface:
    gr.Markdown(
        """
# 🎓 **EduAI - Your Smart Study Companion**
Welcome to **EduAI**, your friendly study assistant! 💬
Get help in **Science, ICT, English, Mathematics**, and more.
"""
    )
    with gr.Row():
        with gr.Column(scale=1, min_width=230):
            gr.Markdown("### 🧭 **Main Menu**")
            with gr.Accordion("📚 Subject Tutor", open=False):
                subj = gr.Radio(
                    ["Science 🧪", "ICT 💻", "English 📘", "Mathematics ➗"],
                    label="Choose a subject"
                )
            with gr.Accordion("🗓 Study Planner", open=False):
                planner = gr.Radio(
                    ["View Plan 📅", "Add Task ✏️", "Study Tips 💡"],
                    label="Planner Options"
                )
            with gr.Accordion("🌐 Languages", open=False):
                lang = gr.Radio(
                    ["Learn Sinhala 🇱🇰", "Learn Tamil 🇮🇳", "Learn English 🇬🇧", "Learn Spanish 🇪🇸"],
                    label="Language Options"
                )
            with gr.Accordion("⚙️ Settings", open=False):
                clear_btn = gr.Button("🧹 Clear Memory")
            with gr.Accordion("👩‍🎓 About", open=False):
                gr.Markdown(
                    """
EduAI was designed and fine-tuned by **Wafa Fazly**,
a passionate Sri Lankan student 👩‍💻
to help learners explore **Science, ICT, English, and more**
in a smart and friendly way! 🌟
"""
                )
        with gr.Column(scale=4):
            context_display = gr.Markdown("📘 **You are in General Mode.** Ask EduAI anything about your studies!")
            chatbot = gr.Chatbot(
                label="💬 EduAI Chat Window",
                height=450,
                render_markdown=True,
                bubble_full_width=False,
                latex_delimiters=[
                    {"left": "$$", "right": "$$", "display": True},
                    {"left": "\\[", "right": "\\]", "display": True}
                ]
            )
            msg = gr.Textbox(
                label="💭 Type your question here...",
                placeholder="Ask EduAI anything about your studies..."
            )
            with gr.Row():
                send = gr.Button("✨ Send Message")
                pause = gr.Button("⏸ Pause", variant="secondary")
            # State to keep track of pause (False = running, True = paused)
            pause_state = gr.State(False)

    # 🪄 Event handlers
    subj.change(update_context, inputs=subj, outputs=context_display)
    planner.change(update_context, inputs=planner, outputs=context_display)
    lang.change(update_context, inputs=lang, outputs=context_display)
    # Send uses send_handler and respects pause_state; outputs update the chatbot and clear the textbox
    send.click(send_handler, inputs=[msg, chatbot, context_display, pause_state], outputs=[chatbot, msg])
    clear_btn.click(clear_memory, outputs=[chatbot, context_display])
    # Pause toggles pause_state, posts a notice in the chat, relabels the pause button, and enables/disables send
    pause.click(toggle_pause, inputs=[pause_state, chatbot], outputs=[pause_state, chatbot, pause, send])

iface.launch()
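
# Local usage (assumption about the deployment environment): export HF_TOKEN
# before launching, e.g. `HF_TOKEN=hf_xxx python app.py`; without it the router
# request above fails authentication and the chat shows the fallback error message.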