|
|
import os |
|
|
import gradio as gr |
|
|
import requests |
|
|
import json |
|
|
|
|
|
|
|
|
def search_web(query):
    """Query the DuckDuckGo Instant Answer API and return a short text summary.

    Args:
        query: Free-text search string.

    Returns:
        The abstract text if present, otherwise up to three related-topic
        snippets joined together, otherwise a "no results" message. On any
        network/HTTP/parse failure a human-readable error string is returned
        instead of raising, so the caller can show it directly in the chat.
    """
    try:
        url = "https://api.duckduckgo.com/"
        params = {"q": query, "format": "json", "no_html": 1, "skip_disambig": 1}
        # Timeout so a slow or unreachable API cannot hang the UI indefinitely.
        response = requests.get(url, params=params, timeout=10)
        # Turn HTTP errors (4xx/5xx) into exceptions instead of parsing an
        # error payload as if it were a result.
        response.raise_for_status()
        data = response.json()

        if data.get("AbstractText"):
            return data["AbstractText"]
        elif data.get("RelatedTopics"):
            topics = [t.get("Text", "") for t in data["RelatedTopics"] if "Text" in t]
            return " ".join(topics[:3])
        else:
            return "No useful information found."
    except Exception as e:
        # Broad catch is deliberate: this is a UI boundary and the error text
        # is surfaced to the user rather than crashing the app.
        return f"Search error: {e}"
|
|
|
|
|
|
|
|
# Hugging Face API token read from the environment (e.g. Space secrets).
# NOTE(review): if unset this is None and the chat API call will be sent
# with "Bearer None" — confirm the token is configured in deployment.
HF_TOKEN = os.getenv("HF_TOKEN")


# JSON file used to persist the chat history across restarts.
MEMORY_FILE = "memory.json"
|
|
|
|
|
def load_memory():
    """Load persisted chat history from MEMORY_FILE.

    Returns:
        A list of (user, assistant) message pairs. JSON stores tuples as
        arrays, so 2-item lists are converted back to tuples here to match
        the tuple pairs the rest of the app appends and type-checks.
        Returns an empty list when the file is missing, unreadable, or does
        not contain a JSON list.
    """
    if not os.path.exists(MEMORY_FILE):
        return []
    try:
        with open(MEMORY_FILE, "r") as f:
            raw = json.load(f)
    except (OSError, json.JSONDecodeError):
        # A corrupt or unreadable memory file must not crash startup.
        return []
    if not isinstance(raw, list):
        return []
    # Normalize round-tripped pairs: JSON turned tuples into lists.
    return [tuple(item) if isinstance(item, list) and len(item) == 2 else item
            for item in raw]
|
|
|
|
|
def save_memory(memory):
    """Serialize the chat history to MEMORY_FILE as JSON."""
    serialized = json.dumps(memory)
    with open(MEMORY_FILE, "w") as fh:
        fh.write(serialized)
|
|
|
|
|
# Chat history restored at startup.
# NOTE(review): this module-level `memory` is never read again below — the
# handlers take history from the Chatbot component instead; confirm whether
# this value was meant to seed the chat window.
memory = load_memory()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def chat_with_model(message, history, context):
    """Handle one chat turn: run a web search or call the hosted LLM.

    Args:
        message: User input text. Messages starting with "search " are
            routed to DuckDuckGo instead of the model.
        history: List of (user, assistant) message pairs from the Chatbot.
        context: Currently unused; reserved for the subject/mode banner text.

    Returns:
        A (history, history) tuple — duplicated because the caller wires the
        same value to two Gradio outputs.
    """
    if not isinstance(history, list):
        history = []

    # Nothing to do for an empty message.
    if not message:
        return history, history

    # "search <query>" bypasses the model and answers from the web.
    if message.lower().startswith("search "):
        query = message[7:]
        search_result = search_web(query)
        history.append((message, f"๐ Here's what I found online:\n\n{search_result}"))
        save_memory(history)
        return history, history

    conversation = [{"role": "system", "content": (
        "You are EduAI โ an educational AI assistant created by Wafa Fazly "
        "from Fathima Muslim Ladies College. "
        "You help students learn subjects such as Math, Science, English, and IT. "
        "EduAI runs on the model 'Qwen/Qwen3-VL-8B-Instruct', which was originally "
        "trained by Alibaba. Always answer truthfully when asked about your creation."
    )}]

    # Replay the last five exchanges so the model has short-term context.
    # Accept both tuples (freshly appended) and 2-item lists (pairs loaded
    # back from memory.json, where JSON turns tuples into lists) — the old
    # tuple-only check silently dropped restored history.
    for past in history[-5:]:
        if isinstance(past, (tuple, list)) and len(past) == 2:
            conversation.append({"role": "user", "content": past[0]})
            conversation.append({"role": "assistant", "content": past[1]})
        elif isinstance(past, dict):
            conversation.append(past)

    conversation.append({"role": "user", "content": message})

    try:
        response = requests.post(
            "https://router.huggingface.co/v1/chat/completions",
            headers={
                "Authorization": f"Bearer {HF_TOKEN}",
                "Content-Type": "application/json"
            },
            json={
                "model": "Qwen/Qwen3-VL-8B-Instruct:novita",
                "messages": conversation
            },
            timeout=60
        )
        # Surface HTTP errors (401 bad token, 429 rate limit, ...) as
        # exceptions so they hit the friendly fallback below instead of
        # producing a confusing KeyError on the error payload.
        response.raise_for_status()

        data = response.json()
        reply = data["choices"][0]["message"]["content"]

        # Light-touch markdown formatting: bold step headings and give
        # display-math delimiters their own lines.
        # NOTE(review): replacing every ":" with ":**" also mangles colons in
        # URLs and ordinary prose — confirm this trade-off is intended.
        reply = reply.replace("Step", "\n\n**Step")
        reply = reply.replace(":", ":**")
        reply = reply.replace("\\[", "\n\n\\[")
        reply = reply.replace("\\]", "\\]\n\n")

        history.append((message, reply))
        save_memory(history)

        return history, history

    except Exception as e:
        # Any failure (network, HTTP status, unexpected JSON shape) degrades
        # to a friendly in-chat notice rather than crashing the UI.
        print("Error:", e)
        history.append((message, "๐ EduAI is having trouble connecting right now. Please try again later!"))
        return history, history
|
|
|
|
|
|
|
|
def update_context(choice):
    """Return the banner markdown for the currently selected study mode."""
    if choice:
        return f"๐ **You selected {choice} mode.** Ask anything related to this topic!"
    return "๐ **You are in General Mode.** Ask EduAI anything about your studies!"
|
|
|
|
|
|
|
|
def clear_memory():
    """Delete the persisted history file and reset the chat window.

    Returns an empty chat history plus a confirmation banner message.
    """
    # EAFP: attempt the removal and ignore an already-missing file.
    try:
        os.remove(MEMORY_FILE)
    except FileNotFoundError:
        pass
    return [], "๐งน Chat memory cleared! Start fresh."
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def send_handler(message, history, context, paused_state):
    """Gate the chat on the pause flag, then delegate to chat_with_model.

    Returns the updated history plus an empty string to clear the textbox.
    """
    if paused_state:
        # While paused, only post a notice — never contact the model.
        chat_log = history if isinstance(history, list) else []
        chat_log.append((None, "โธ๏ธ Chat is paused. Click Resume to continue."))
        return chat_log, ""

    result = chat_with_model(message, history, context)
    # chat_with_model returns (history, history); unwrap defensively in case
    # a bare history list ever comes back.
    if isinstance(result, tuple) and len(result) == 2:
        return result[0], ""
    return result, ""
|
|
|
|
|
|
|
|
|
|
|
def toggle_pause(paused_state, history):
    """Flip the pause flag and update the chat log plus both buttons.

    Args:
        paused_state: Current boolean pause flag (gr.State value).
        history: Chat history list from the Chatbot component.

    Returns:
        (new_state, history, pause_button_update, send_button_update).
    """
    new_state = not bool(paused_state)
    if not isinstance(history, list):
        history = []

    # gr.update() is the version-stable API: gr.Button.update() was removed
    # in Gradio 4.x and raises AttributeError there.
    if new_state:
        history.append((None, "โธ๏ธ Chat paused. Send is disabled."))
        pause_btn_update = gr.update(value="โถ Resume")
        send_btn_update = gr.update(disabled=True)
    else:
        history.append((None, "โถ๏ธ Chat resumed. You can send messages now."))
        pause_btn_update = gr.update(value="โธ Pause")
        send_btn_update = gr.update(disabled=False)

    return new_state, history, pause_btn_update, send_btn_update
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ---------------------------------------------------------------------------
# Gradio UI: left sidebar of accordion menus, main chat column on the right,
# wired to the handlers defined above.
# ---------------------------------------------------------------------------
with gr.Blocks(theme=gr.themes.Soft(primary_hue="violet")) as iface:
    # App header banner.
    gr.Markdown(
        """
        # ๐ **EduAI โ Your Smart Study Companion**
        Welcome to **EduAI**, your friendly study assistant! ๐ฌ
        Get help in **Science, ICT, English, Mathematics**, and more.
        """
    )

    with gr.Row():
        # --- Sidebar -------------------------------------------------------
        with gr.Column(scale=1, min_width=230):
            gr.Markdown("### ๐งญ **Main Menu**")

            with gr.Accordion("๐ Subject Tutor", open=False):
                subj = gr.Radio(
                    ["Science ๐งช", "ICT ๐ป", "English ๐", "Mathematics โ"],
                    label="Choose a subject"
                )

            with gr.Accordion("๐ Study Planner", open=False):
                planner = gr.Radio(
                    ["View Plan ๐", "Add Task โ๏ธ", "Study Tips ๐ก"],
                    label="Planner Options"
                )

            with gr.Accordion("๐ Languages", open=False):
                lang = gr.Radio(
                    ["Learn Sinhala ๐ฑ๐ฐ", "Learn Tamil ๐ฎ๐ณ", "Learn English ๐ฌ๐ง", "Learn Spanish ๐ช๐ธ"],
                    label="Language Options"
                )

            with gr.Accordion("โ๏ธ Settings", open=False):
                clear_btn = gr.Button("๐งน Clear Memory")

            with gr.Accordion("๐ฉโ๐ About", open=False):
                gr.Markdown(
                    """
                    EduAI was designed and fine-tuned by **Wafa Fazly**,
                    a passionate Sri Lankan student ๐ฉโ๐ป
                    to help learners explore **Science, ICT, English, and more** โ
                    in a smart and friendly way! ๐
                    """
                )

        # --- Main chat column ----------------------------------------------
        with gr.Column(scale=4):
            # Banner showing the currently selected study mode.
            context_display = gr.Markdown("๐ **You are in General Mode.** Ask EduAI anything about your studies!")
            chatbot = gr.Chatbot(
                label="๐ฌ EduAI Chat Window",
                height=450,
                render_markdown=True,
                bubble_full_width=False,
                # Render $$...$$ and \[...\] spans as display math.
                latex_delimiters=[
                    {"left": "$$", "right": "$$", "display": True},
                    {"left": "\\[", "right": "\\]", "display": True}
                ]
            )
            msg = gr.Textbox(
                label="๐ญ Type your question here...",
                placeholder="Ask EduAI anything about your studies..."
            )

            with gr.Row():
                send = gr.Button("โจ Send Message")
                pause = gr.Button("โธ Pause", variant="secondary")

            # Holds the boolean pause flag across interactions.
            pause_state = gr.State(False)

    # Any menu selection rewrites the mode banner.
    # NOTE(review): selections only change the banner text; they are not
    # passed into the model prompt — confirm that is intended.
    subj.change(update_context, inputs=subj, outputs=context_display)
    planner.change(update_context, inputs=planner, outputs=context_display)
    lang.change(update_context, inputs=lang, outputs=context_display)

    # Send: produce new history and clear the textbox.
    send.click(send_handler, inputs=[msg, chatbot, context_display, pause_state], outputs=[chatbot, msg])

    # Clear: wipe the chat window; the confirmation text lands in the banner.
    clear_btn.click(clear_memory, outputs=[chatbot, context_display])

    # Pause toggle: flips the state and relabels/disables the buttons.
    pause.click(toggle_pause, inputs=[pause_state, chatbot], outputs=[pause_state, chatbot, pause, send])

iface.launch()
|
|
|