import asyncio
import logging
import os
from html import escape

import gradio as gr
import pycountry
from dotenv import load_dotenv

from chat_utils import (
    MAX_MESSAGES_IN_CONVERSATION,
    init_session,
    limited_chat_wrapper,
)
from config import SanatanConfig
from db import SanatanDatabase
from drive_downloader import ZipDownloader
from graph_helper import generate_graph

# Logging
logging.basicConfig()
logger = logging.getLogger()
logger.setLevel(logging.INFO)

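# Build the agent graph once at module load.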
graph = generate_graph()


def get_all_languages():
    """
    Returns a sorted list of all languages by their English names.
    Uses ISO 639 data from pycountry.
    """
    languages = [lang.name for lang in pycountry.languages if hasattr(lang, "name")]
    return sorted(set(languages))  # remove duplicates and sort alphabetically


def init():
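    """Load environment variables and make sure the local scripture database exists.

    If the database fails its sanity check, the zipped store is downloaded from
    Google Drive (using the service-account credentials and file id from the
    environment) and extracted into the working directory.
    """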
    load_dotenv(override=True)
    try:
        SanatanDatabase().test_sanity()
    except Exception as e:
        logger.warning("Sanity Test Failed - %s", e)
        logger.info("Downloading database ...")
        downloader = ZipDownloader(
            service_account_json=os.getenv("GOOGLE_SERVICE_ACCOUNT_JSON")
        )
        zip_path = downloader.download_zip_from_drive(
            file_id=os.getenv("CHROMADB_FILE_ID"),
            output_path=SanatanConfig.dbStorePath,
        )
        downloader.unzip(zip_path, extract_to="./")


def render_message_with_tooltip(content: str, max_chars=200):
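    """Return an HTML snippet with the first ``max_chars`` characters of ``content``
    (plus an ellipsis when truncated) and the full escaped text as a hover tooltip."""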
    short = escape(content[:max_chars]) + ("…" if len(content) > max_chars else "")
    return f"<div title='{escape(content)}'>{short}</div>"


# UI Elements
message_count = gr.State(0)
thread_id = gr.State(init_session)

supported_scriptures = "\n - ".join(
    [
        f"πŸ“– **{scripture['title']}** [source]({scripture['source']})"
        for scripture in SanatanConfig.scriptures
    ]
)

init()

message_textbox = gr.Textbox(
    placeholder="Search the scriptures ...", submit_btn=True, stop_btn=True
)

with gr.Blocks(
    theme=gr.themes.Citrus(),
    title="Sanatan-AI",
    css="""
    /* hide the additional inputs row under the textbox */
    .gr-chat-interface .gr-form {
        display: none !important;
    }
    table {
    border-collapse: collapse;
    width: 90%;
    }

    table, th, td {
    border: 1px solid #ddd;
    padding: 6px;
    font-size: small;
    }

    td {
    word-wrap: break-word;
    white-space: pre-wrap; /* preserves line breaks but wraps long lines */
    max-width: 300px; /* control width */
    vertical-align: top;
    }

    .spinner {
    display: inline-block;
    width: 1em;
    height: 1em;
    border: 2px solid transparent;
    border-top: 2px solid #333;
    border-radius: 50%;
    animation: spin 0.8s linear infinite;
    vertical-align: middle;
    margin-left: 0.5em;
    }


    @keyframes spin {
    0% { transform: rotate(0deg); }
    100% { transform: rotate(360deg); }
    }


    .thinking-bubble {
    opacity: 0.5;
    font-style: italic;
    animation: pulse 1.5s infinite;
    margin-bottom: 5px;
    }
    @keyframes pulse {
    0% { opacity: 0.3; }
    50% { opacity: 1; }
    100% { opacity: 0.3; }
    }

    .node-label {
    cursor: help;
    border-bottom: 1px dotted #aaa;
    }

    .intermediate-output {
        opacity: 0.4;
        font-style: italic;

        white-space: nowrap;
        overflow: hidden;
        text-overflow: ellipsis;
    }
    """,
) as gradio_app:
    show_sidebar = gr.State(True)

    # with gr.Column(scale=1, visible=show_sidebar.value) as sidebar_container:
    with gr.Sidebar(open=show_sidebar.value) as sidebar:
        # session_id = gr.Textbox(value=f"Thread: {thread_id}")
        # gr.Markdown(value=f"{'\n'.join([msg['content'] for msg in intro_messages])}")
        gr.Markdown(
            value="Namaskaram πŸ™ I am Sanatan-Bot and I can help you explore the following scriptures:\n\n"
        )

        async def populate_chat_input(text: str):
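            """Stream ``text`` into the chat textbox one character at a time,
            simulating typing with a short delay between characters."""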
            buffer = ""
            for c in text:
                buffer += c
                yield buffer
                await asyncio.sleep(0.05)
            return

        def close_side_bar():
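            """Collapse the sidebar before an example prompt is loaded into the textbox."""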
            print("close_side_bar invoked")
            yield gr.update(open=False)

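        # One collapsible accordion per scripture; each example button first closes
        # the sidebar and then streams its prompt into the chat textbox.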
        for scripture in sorted(SanatanConfig.scriptures, key=lambda d: d.get("title")):
            with gr.Accordion(label=f"{scripture['title']}", open=False):
                gr.Markdown(f"* Source: [πŸ”— click here]({scripture['source']})")
                gr.Markdown(f"* Language: {scripture['language']}")
                gr.Markdown(f"* Examples :")
                with gr.Row():
                    for example_label, example_text in zip(
                        scripture["example_labels"], scripture["examples"]
                    ):
                        btn = gr.Button(value=f"{example_label}", size="sm")
                        btn.click(close_side_bar, outputs=[sidebar]).then(
                            populate_chat_input,
                            inputs=[gr.State(example_text)],
                            outputs=[message_textbox],
                        )

        gr.Markdown(value="------")
        debug_checkbox = gr.Checkbox(label="Debug (Streaming)", value=True)
        preferred_language = gr.Dropdown(
            choices=get_all_languages(), value="English", label="Preferred Language"
        )

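    # Canned navigation follow-ups: the key is the button label shown to the user,
    # the value is the prompt that actually gets submitted to the bot.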
    navigation_followup_shortcuts = {
        "⬅️ Prev verse": "show me the previous verse",
        "➡️ Next verse": "show me the next verse",
        "⬅️ Prev Chapter": "From the same prabandham as above, show the first pasuram from the previous chapter of the same decade",
        "➡️ Next Chapter": "From the same prabandham as above, show the first pasuram from the next chapter of the same decade",
        "⬅️ Prev Decade": "From the same prabandham as above, show the first pasuram from the previous decade",
        "➡️ Next Decade": "From the same prabandham as above, show the first pasuram from the next decade",
    }

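    # Deeper-dive follow-ups: explanations, word-by-word meanings, related pasurams
    # and a quiz, phrased the same way (label -> prompt).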
    further_questions_followup_shortcuts = {
        "🧹 Sanitize": "sanitize the native verses",
        "📜 Explain": "provide explanatory notes if available for the above verses",
        "📜 Detailed Meaning": "provide word by word meaning if available for the above verses",
        "📜 Explore Divya Desam": "From the same divya desam as the above pasuram, show me other pasurams",
        "📜 Explore Azhwar": "From the same azhwar as the above pasuram, show me other pasurams",
        "🏛️ Another divya desam (same āzhwār)": "show pasuram from another divya desam by the same azhwar",
        "👀 Another āzhwār (same divya desam)": "show pasuram from the same divya desam by another azhwar",
        "❓ Quiz": "Pick any pasuram. Frame a question to ask me related to that pasuram based on its explanatory notes and word by word meanings. Output ONLY the pasuram title, the verse number, the pasuram native lyrics, the question you framed and the answer to that question.",
    }

    chatbot = gr.Chatbot(
        elem_id="chatbot",
        avatar_images=("assets/avatar_user.png", "assets/adiyen_bot.png"),
        # value=intro_messages,
        label="Sanatan-AI-Bot",
        show_copy_button=True,
        show_copy_all_button=True,
        type="messages",
        height=600,
        render_markdown=True,
        placeholder="Search the scriptures ...",
    )

    additional_inputs = gr.Accordion(
        label="Additional Inputs", open=False, visible=False
    )
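    # The chat interface forwards the hidden additional inputs (thread id, debug flag,
    # preferred language, message count) to limited_chat_wrapper, which also returns
    # the updated thread id and message count as additional outputs.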
    chatInterface = gr.ChatInterface(
        title="Sanatan-AI",
        fn=limited_chat_wrapper,
        additional_inputs=[
            thread_id,
            debug_checkbox,
            preferred_language,
            message_count,
        ],
        additional_inputs_accordion=additional_inputs,
        additional_outputs=[thread_id, message_count],
        chatbot=chatbot,
        textbox=message_textbox,
        type="messages",
    )

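    # Follow-up shortcuts: hidden initially, while a request is being processed,
    # and once the follow-up limit is reached.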
    with gr.Column(visible=False) as followup_examples:
        with gr.Row():
            followup_count_textbox = gr.Markdown(container=False, show_label=False)
        with gr.Row():
            gr.Examples(
                label="Quick Navigation Follow-ups",
                example_labels=[key for key in navigation_followup_shortcuts.keys()],
                examples=[value for value in navigation_followup_shortcuts.values()],
                inputs=[message_textbox],
                examples_per_page=len(
                    navigation_followup_shortcuts
                ),  # Show all examples on the same page.
            )
        with gr.Row():
            gr.Examples(
                label="Further Questions Follow-ups",
                example_labels=[
                    key for key in further_questions_followup_shortcuts.keys()
                ],
                examples=[
                    value for value in further_questions_followup_shortcuts.values()
                ],
                inputs=[message_textbox],
                examples_per_page=len(
                    further_questions_followup_shortcuts
                ),  # Show all examples on the same page.
            )

    # Function to toggle visibility once chat has started
    def toggle_examples(history):
        return gr.update(visible=len(history) > 0)

    def hide_examples_while_processing(is_processing: bool):
        return gr.update(visible=not is_processing)

    # Hide the follow-up shortcuts while a message is being processed and
    # show them again once the chatbot output updates.
    # chatbot.change(toggle_examples, chatbot, followup_examples)
    message_textbox.submit(
        hide_examples_while_processing,
        inputs=[gr.State(True)],
        outputs=[followup_examples],
    )
    chatbot.change(
        hide_examples_while_processing,
        inputs=[gr.State(False)],
        outputs=[followup_examples],
    )

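    # Show how many follow-ups remain and hide the shortcut panel once the limit is reached.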
    def update_followup_counter(count):
        remaining_followups = MAX_MESSAGES_IN_CONVERSATION - count
        no_more_followups = False
        if remaining_followups > 1:
            text = f"✨ `{remaining_followups}` more follow-ups to go."
        elif remaining_followups == 1:
            text = "🌟 Just one more follow-up to go!"
        else:
            text = "✅ That was the last follow-up."
            no_more_followups = True
        return gr.update(value=text), gr.update(visible=not no_more_followups)

    message_count.change(
        update_followup_counter,
        inputs=[message_count],
        outputs=[followup_count_textbox, followup_examples],
    )