File size: 9,698 Bytes
235e024
2e78e50
235e024
 
a39fa98
9b284e5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
235e024
9b284e5
 
 
 
 
 
 
 
 
 
 
 
 
 
bf0aeeb
 
 
 
9b284e5
 
 
 
bf0aeeb
 
 
 
 
9b284e5
 
 
 
 
 
 
bf0aeeb
9b284e5
bf0aeeb
 
 
 
 
e2443b6
9b284e5
bf0aeeb
 
 
 
 
 
 
 
9b284e5
 
235e024
bf0aeeb
235e024
 
 
 
 
9b284e5
235e024
 
 
9b284e5
bf0aeeb
 
235e024
 
 
 
 
bf0aeeb
9b284e5
 
 
 
ca92c8c
9b284e5
 
235e024
9b284e5
 
bf0aeeb
9b284e5
ca92c8c
9b284e5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
bf0aeeb
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9b284e5
235e024
 
 
 
9b284e5
235e024
 
baacc26
577009b
9b284e5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
bf0aeeb
 
9b284e5
 
 
 
 
bf0aeeb
 
 
 
9b284e5
 
bf0aeeb
 
 
9b284e5
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
import os
import gradio as gr
import requests
import json

# ๐ŸŒ Web search function
def search_web(query):
    try:
        url = "https://api.duckduckgo.com/"
        params = {"q": query, "format": "json", "no_html": 1, "skip_disambig": 1}
        response = requests.get(url, params=params)
        data = response.json()

        if data.get("AbstractText"):
            return data["AbstractText"]
        elif data.get("RelatedTopics"):
            topics = [t.get("Text", "") for t in data["RelatedTopics"] if "Text" in t]
            return " ".join(topics[:3])
        else:
            return "No useful information found."
    except Exception as e:
        return f"Search error: {e}"

# ๐Ÿง  Memory setup
HF_TOKEN = os.getenv("HF_TOKEN")  # Hugging Face API token; None when the env var is unset
MEMORY_FILE = "memory.json"  # on-disk chat history, written by save_memory()

def load_memory():
    """Load persisted chat history from MEMORY_FILE.

    Returns a list of (user, bot) tuples. JSON has no tuple type, so pairs
    written by save_memory come back as 2-element lists; convert them back
    to tuples so chat_with_model's isinstance(past, tuple) check still
    recognizes them after a restart. Returns [] when no file exists.
    """
    if os.path.exists(MEMORY_FILE):
        with open(MEMORY_FILE, "r") as f:
            raw = json.load(f)
        # restore the tuple shape lost in the JSON round-trip
        return [tuple(item) if isinstance(item, list) else item for item in raw]
    return []

def save_memory(memory):
    """Persist the chat history list to MEMORY_FILE as JSON."""
    serialized = json.dumps(memory)
    with open(MEMORY_FILE, "w") as handle:
        handle.write(serialized)

# Load any persisted history at startup. NOTE(review): this value is not
# referenced by the UI below (the Chatbot widget starts empty) — kept for
# compatibility; confirm whether it should seed the chat window.
memory = load_memory()

# -----------------------
# Chat function (original behavior)
# returns (history, history) to match previous usage
# -----------------------
def chat_with_model(message, history, context):
    """Handle one user turn and return (history, history).

    The pair is returned because the Gradio wiring expects two outputs.
    `context` is accepted for interface compatibility but is unused here.
    Messages starting with "search " (case-insensitive) are routed to the
    DuckDuckGo helper instead of the language model.
    """
    if not isinstance(history, list):
        history = []

    # prevent empty and whitespace-only messages
    if not message or not message.strip():
        return history, history

    # ๐ŸŒ Web search mode
    if message.lower().startswith("search "):
        query = message[7:]
        search_result = search_web(query)
        history.append((message, f"๐Ÿ”Ž Here's what I found online:\n\n{search_result}"))
        save_memory(history)
        return history, history

    # ๐Ÿง  Build conversation: system prompt + last 5 turns + new message
    conversation = [{"role": "system", "content": (
    "You are EduAI โ€” an educational AI assistant created by Wafa Fazly "
    "from Fathima Muslim Ladies College. "
    "You help students learn subjects such as Math, Science, English, and IT. "
    "EduAI runs on the model 'Qwen/Qwen3-VL-8B-Instruct', which was originally "
    "trained by Alibaba. Always answer truthfully when asked about your creation."
    )}]

    # convert tuples to messages if necessary (keeps old history format);
    # NOTE(review): entries reloaded from JSON arrive as lists, not tuples,
    # and are silently skipped here — confirm they should be included
    for past in history[-5:]:
        # expect (user_message, bot_reply)
        if isinstance(past, tuple) and len(past) == 2:
            conversation.append({"role": "user", "content": past[0]})
            conversation.append({"role": "assistant", "content": past[1]})
        elif isinstance(past, dict):
            conversation.append(past)

    conversation.append({"role": "user", "content": message})

    # ๐Ÿš€ Send to Hugging Face model
    try:
        response = requests.post(
            "https://router.huggingface.co/v1/chat/completions",
            headers={
                "Authorization": f"Bearer {HF_TOKEN}",
                "Content-Type": "application/json"
            },
            json={
                "model": "Qwen/Qwen3-VL-8B-Instruct:novita",
                "messages": conversation
            },
            timeout=60
        )

        data = response.json()
        reply = data["choices"][0]["message"]["content"]

        # ๐Ÿงฎ Clean up math formatting (keeps your original formatting code).
        # NOTE(review): every ":" in the reply gets bolded, not just step
        # headings — intentional per the original, but fragile for URLs.
        reply = reply.replace("Step", "\n\n**Step")
        reply = reply.replace(":", ":**")
        reply = reply.replace("\\[", "\n\n\\[")
        reply = reply.replace("\\]", "\\]\n\n")

        history.append((message, reply))
        save_memory(history)
        # IMPORTANT: return a pair (history, history) because other code expects two outputs
        return history, history

    except Exception as e:
        # any failure (network error, HTTP error body, unexpected JSON shape)
        # degrades to a friendly in-chat message instead of crashing the UI
        print("Error:", e)
        history.append((message, "๐Ÿ˜… EduAI is having trouble connecting right now. Please try again later!"))
        return history, history

# ๐Ÿ“˜ Sidebar context update
def update_context(choice):
    """Return the context-banner markdown for the selected sidebar option."""
    if choice:
        return f"๐Ÿ“˜ **You selected {choice} mode.** Ask anything related to this topic!"
    return "๐Ÿ“˜ **You are in General Mode.** Ask EduAI anything about your studies!"

# ๐Ÿงน Clear chat memory
def clear_memory():
    """Delete the persisted history file and reset the chat window/banner."""
    try:
        os.remove(MEMORY_FILE)
    except FileNotFoundError:
        pass  # nothing persisted yet — same outcome as the exists() check
    return [], "๐Ÿงน Chat memory cleared! Start fresh."

# -----------------------
# Pause / Send wrappers (FIXED)
# -----------------------

# send handler that respects paused state (ALWAYS returns (chat_history, textbox_clear))
def send_handler(message, history, context, paused_state):
    """Route a send-button click, honoring the pause toggle.

    Always returns (chat_history, "") so the wired outputs update the
    chatbot and clear the textbox.
    """
    if paused_state:
        # Paused: never reach the model — just show a hint bubble.
        chat_log = history if isinstance(history, list) else []
        chat_log.append((None, "โธ๏ธ Chat is paused. Click Resume to continue."))
        return chat_log, ""

    # Active: delegate to the original handler, which returns (history, history);
    # unpack defensively in case it ever returns a bare history instead.
    result = chat_with_model(message, history, context)
    if isinstance(result, tuple) and len(result) == 2:
        return result[0], ""
    return result, ""


# toggle pause/resume and update UI (state + chat + button text + send button disabled)
def toggle_pause(paused_state, history):
    new_state = not bool(paused_state)
    if not isinstance(history, list):
        history = []

    if new_state:
        # now paused
        history.append((None, "โธ๏ธ Chat paused. Send is disabled."))
        pause_btn_update = gr.Button.update(value="โ–ถ Resume")
        send_btn_update = gr.Button.update(disabled=True)
    else:
        # resumed
        history.append((None, "โ–ถ๏ธ Chat resumed. You can send messages now."))
        pause_btn_update = gr.Button.update(value="โธ Pause")
        send_btn_update = gr.Button.update(disabled=False)

    # return new pause state, updated chat history, and two UI updates (pause button & send button)
    return new_state, history, pause_btn_update, send_btn_update

# -----------------------
# Build UI (unchanged layout; pause added)
# -----------------------
# Top-level layout: a narrow sidebar column (subject / planner / languages /
# settings / about) on the left, and the chat column (banner, chat window,
# textbox, send + pause buttons) on the right.
with gr.Blocks(theme=gr.themes.Soft(primary_hue="violet")) as iface:
    gr.Markdown(
        """
        # ๐ŸŽ“ **EduAI โ€” Your Smart Study Companion**
        Welcome to **EduAI**, your friendly study assistant! ๐Ÿ’ฌ  
        Get help in **Science, ICT, English, Mathematics**, and more.  
        """
    )

    with gr.Row():
        with gr.Column(scale=1, min_width=230):
            gr.Markdown("### ๐Ÿงญ **Main Menu**")

            with gr.Accordion("๐Ÿ“š Subject Tutor", open=False):
                subj = gr.Radio(
                    ["Science ๐Ÿงช", "ICT ๐Ÿ’ป", "English ๐Ÿ“˜", "Mathematics โž—"],
                    label="Choose a subject"
                )

            with gr.Accordion("๐Ÿ—“ Study Planner", open=False):
                planner = gr.Radio(
                    ["View Plan ๐Ÿ“…", "Add Task โœ๏ธ", "Study Tips ๐Ÿ’ก"],
                    label="Planner Options"
                )

            with gr.Accordion("๐ŸŒ Languages", open=False):
                lang = gr.Radio(
                    ["Learn Sinhala ๐Ÿ‡ฑ๐Ÿ‡ฐ", "Learn Tamil ๐Ÿ‡ฎ๐Ÿ‡ณ", "Learn English ๐Ÿ‡ฌ๐Ÿ‡ง", "Learn Spanish ๐Ÿ‡ช๐Ÿ‡ธ"],
                    label="Language Options"
                )

            with gr.Accordion("โš™๏ธ Settings", open=False):
                clear_btn = gr.Button("๐Ÿงน Clear Memory")

            with gr.Accordion("๐Ÿ‘ฉโ€๐ŸŽ“ About", open=False):
                gr.Markdown(
                    """
                    EduAI was designed and fine-tuned by **Wafa Fazly**,  
                    a passionate Sri Lankan student ๐Ÿ‘ฉโ€๐Ÿ’ป  
                    to help learners explore **Science, ICT, English, and more** โ€”  
                    in a smart and friendly way! ๐ŸŒŸ
                    """
                )

        with gr.Column(scale=4):
            context_display = gr.Markdown("๐Ÿ“˜ **You are in General Mode.** Ask EduAI anything about your studies!")
            # LaTeX delimiters mirror the \[ ... \] markers inserted by
            # chat_with_model's math-formatting pass.
            chatbot = gr.Chatbot(
                label="๐Ÿ’ฌ EduAI Chat Window",
                height=450,
                render_markdown=True,
                bubble_full_width=False,
                latex_delimiters=[
                    {"left": "$$", "right": "$$", "display": True},
                    {"left": "\\[", "right": "\\]", "display": True}
                ]
            )
            msg = gr.Textbox(
                label="๐Ÿ’ญ Type your question here...",
                placeholder="Ask EduAI anything about your studies..."
            )

            with gr.Row():
                send = gr.Button("โœจ Send Message")
                pause = gr.Button("โธ Pause", variant="secondary")
                # state to keep track of pause (False = running, True = paused)
                pause_state = gr.State(False)

    # ๐Ÿช„ Event handlers
    # All three radios share one handler that rewrites the context banner.
    subj.change(update_context, inputs=subj, outputs=context_display)
    planner.change(update_context, inputs=planner, outputs=context_display)
    lang.change(update_context, inputs=lang, outputs=context_display)

    # send now uses send_handler and respects pause_state; outputs: chatbot and clears textbox
    send.click(send_handler, inputs=[msg, chatbot, context_display, pause_state], outputs=[chatbot, msg])

    clear_btn.click(clear_memory, outputs=[chatbot, context_display])

    # pause toggles pause_state, updates chatbot with a message, updates pause button label and disables/enables send
    pause.click(toggle_pause, inputs=[pause_state, chatbot], outputs=[pause_state, chatbot, pause, send])

iface.launch()