import gradio as gr
import time
import pandas as pd
import asyncio
from uuid import uuid4
from gradio_client import Client, handle_file
from utils.retriever import retrieve_paragraphs
from utils.generator import generate
from utils.logger import ChatLogger
from huggingface_hub import CommitScheduler
import json
import ast
import os
from pathlib import Path

# Set up dataset directory and HuggingFace integration
JSON_DATASET_DIR = Path("json_dataset")

# Check if directory exists and create if needed
if not JSON_DATASET_DIR.exists():
    try:
        JSON_DATASET_DIR.mkdir(parents=True, exist_ok=True)
        print(f"Created dataset directory at {JSON_DATASET_DIR}")
    except Exception as e:
        print(f"Error creating dataset directory: {str(e)}")
        raise
else:
    print(f"Using existing dataset directory at {JSON_DATASET_DIR}")

# Get HuggingFace token from environment
SPACES_LOG = os.environ.get("GINA_SPACES_LOG")
if not SPACES_LOG:
    print("Warning: GINA_SPACES_LOG not found in environment, using local storage only")

# Initialize scheduler with proper dataset configuration
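# CommitScheduler pushes the contents of folder_path to the dataset repo in a
# background thread (every 5 minutes by default when `every` is not set)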
scheduler = CommitScheduler(
    repo_id="GIZ/spaces_logs",
    repo_type="dataset",
    folder_path=JSON_DATASET_DIR,
    path_in_repo="gina_chatbot",
    token=SPACES_LOG if SPACES_LOG else None,
    # every=60  # note: CommitScheduler's `every` is specified in minutes, not seconds
)

# Initialize logger with configured scheduler
chat_logger = ChatLogger(scheduler=scheduler)
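# ChatLogger (utils.logger) is assumed to write JSONL records into JSON_DATASET_DIR,
# which the scheduler above then syncs to the Hub dataset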

# Sample questions for examples 
SAMPLE_QUESTIONS = {
    "Fundamentos y tendencias internacionales de EC": [
        "¿Cómo se diferencia el modelo de economía circular del modelo lineal tradicional de 'tomar, hacer, desechar'?",
        "¿Cuáles son algunos de los principios clave de la economía circular y cómo se aplican en la práctica?",
        "¿Podrías dar ejemplos de industrias o empresas que estén implementando con éxito prácticas de economía circular?"
    ],
    "EC en Colombia": [
        "¿Qué políticas y normativas vigentes en Colombia impulsan la adopción de la economía circular?",
        "¿Cómo pueden las regulaciones colombianas incentivar la innovación en el ecodiseño y la gestión de residuos?",
        "¿Qué papel tienen los instrumentos económicos y fiscales en la promoción de la circularidad en el sector productivo de Colombia?"]
}

# Global variable to cache API results and prevent double calls
geojson_analysis_cache = {}
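# (currently only referenced by the commented-out GeoJSON analysis helper below)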

# Initialize Chat
def start_chat(query, history):
    """Start a new chat interaction"""
    history = history + [(query, None)]
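    # Disable the input box, switch to the "Fuentes" tab (id=1), and show the pending query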
    return gr.update(interactive=False), gr.update(selected=1), history

def finish_chat():
    """Finish chat and reset input"""
    return gr.update(interactive=True, value="")

def make_html_source(source, i):
    """
    Convert a retrieved paragraph into an HTML card for the "Fuentes" side tab.

    Expects source['answer'] (the paragraph text) and source['answer_metadata']
    containing 'filename' and 'page'.
    """
    meta = source['answer_metadata']
    content = source['answer'].strip()

    name = meta['filename']
    card = f"""
        <div class="card" id="doc{i}">
            <div class="card-content">
                <h2>Doc {i} - {meta['filename']} - Page {int(meta['page'])}</h2>
                <p>{content}</p>
            </div>
            <div class="card-footer">
                <span>{name}</span>
                <a href="{meta['filename']}#page={int(meta['page'])}" target="_blank" class="pdf-link">
                    <span role="img" aria-label="Open PDF">🔗</span>
                </a>
            </div>
        </div>
        """

    return card

BEGINNING_TEXT = "**Respuesta generada mediante inteligencia artificial:** \n\n"
async def chat_response(query, history, category, request: gr.Request = None):
    """Retrieve context for the query, generate an answer, and stream it into the chat."""
    
    try:
        retrieved_paragraphs = retrieve_paragraphs(query, category)
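        # retrieve_paragraphs is expected to return a string-encoded list of dicts;
        # literal_eval converts it back into Python objects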
        context_retrieved = ast.literal_eval(retrieved_paragraphs)
        
        # Build list of only content, no metadata
        context_retrieved_formatted = "||".join(doc['answer'] for doc in context_retrieved)
        context_retrieved_lst = [doc['answer'] for doc in context_retrieved]
        
        # Prepare HTML for displaying source documents
        docs_html = []
        for i, d in enumerate(context_retrieved, 1):
            docs_html.append(make_html_source(d, i))
        docs_html = "".join(docs_html)
        
        # Generate response
        response = await generate(query=query, context=context_retrieved_lst)
        
        # Add disclaimer to the response
        response_with_disclaimer = BEGINNING_TEXT + response
        # Log the interaction
        try:
            chat_logger.log(
                query=query,
                answer=response,
                retrieved_content=context_retrieved_lst,
                request=request
            )
        except Exception as e:
            print(f"Logging error: {str(e)}")
        

        # Stream response character by character
        displayed_response = ""
        for i, char in enumerate(response_with_disclaimer):
            displayed_response += char
            history[-1] = (query, displayed_response)
            yield history, docs_html
            # Only add delay every few characters to avoid being too slow
            if i % 3 == 0:
                await asyncio.sleep(0.02)
                
    except Exception as e:
        error_message = f"Error processing request: {str(e)}"
        history[-1] = (query, error_message)
        yield history, ""
 

    # # Stream response word by word into the chat
    # words = response.split()

    # for i in range(len(words)):
    #     history[-1] = (query, " ".join(words[:i+1]))
    #     yield history, "**Sources:** Sample source documents would appear here..."
    #     await asyncio.sleep(0.05)

# def auto_analyze_file(file, history):
#     """Automatically analyze uploaded GeoJSON file and add results to chat"""
#     if file is not None:
#         try:
#             # Call API immediately and cache results
#             file_key = f"{file.name}_{file.size if hasattr(file, 'size') else 'unknown'}"
            
#             if file_key not in geojson_analysis_cache:
#                 formatted_stats = "This is to be removed"
#                 geojson_analysis_cache[file_key] = formatted_stats
            
#             # Add analysis results directly to chat (no intermediate message)
#             analysis_query = "📄 Análisis del GeoJSON cargado"
#             cached_result = geojson_analysis_cache[file_key]
            
#             # Add both query and response to history
#             history = history + [(analysis_query, cached_result)]
#             return history, "**Sources:** WhispAPI Analysis Results"
            
#         except Exception as e:
#             error_msg = f"❌ Error processing GeoJSON file: {str(e)}"
#             history = history + [("📄 Error en análisis GeoJSON", error_msg)]
#             return history, ""
    
#     return history, ""

def toggle_search_method(method):
    """Toggle between GeoJSON upload and country selection"""
    # if method == "Subir GeoJson":
    #     return (
    #         gr.update(visible=True),   # geojson_section
    #         gr.update(visible=False),  # reports_section
    #         gr.update(value=None),     # dropdown_country
    #     )
    # else:  # "Talk to Reports"
    return (
            #gr.update(visible=False),  # geojson_section 
            gr.update(visible=True),   # reports_section
            gr.update(),               # dropdown_country
        )

def change_sample_questions(key):
    """Update visible examples based on selected category"""
    keys = list(SAMPLE_QUESTIONS.keys())
    index = keys.index(key)
    visible_bools = [False] * len(keys)
    visible_bools[index] = True
    return [gr.update(visible=visible_bools[i]) for i in range(len(keys))]

# Set up Gradio Theme
theme = gr.themes.Base(
    primary_hue="green",
    secondary_hue="blue",
    font=[gr.themes.GoogleFont("Poppins"), "ui-sans-serif", "system-ui", "sans-serif"],
    text_size=gr.themes.utils.sizes.text_sm,
)



init_prompt = """
        Hola, soy Gina, una asistente conversacional con IA diseñada para ayudarte a comprender conceptos y temas de la Economía Circular. Responderé a tus preguntas usando la base de datos de documentos sobre economía circular.

        💡 **Cómo usarla (pestañas a la derecha)**

        - **Enfoque:** Selecciona la sección de informes/documentos.
        - **Ejemplos:** Selecciona entre ejemplos de preguntas de diferentes categorías.
        - **Fuentes:** Consulta las fuentes de contenido utilizadas para generar las respuestas y verificar los datos.

        ⚠️ Para conocer las limitaciones e información sobre la recopilación de datos, consulta la pestaña "Aviso legal".
        """

with gr.Blocks(title="Gina Bot", theme=theme, css="style.css") as demo:
    
    # Main Chat Interface
    with gr.Tab("Gina Bot"):
        with gr.Row():
            # Left column - Chat interface (2/3 width)
            with gr.Column(scale=2):
                chatbot = gr.Chatbot(
                    value=[(None, init_prompt)],
                    show_copy_button=True,
                    show_label=False,
                    layout="panel",
                    avatar_images=(None, "chatbot_icon_2.png"),
                    height="auto"
                )
                
                # Feedback UI
                with gr.Column():
                    with gr.Row(visible=False) as feedback_row:
                        gr.Markdown("¿Te ha sido útil esta respuesta?")
                        with gr.Row():
                            okay_btn = gr.Button("👍 De acuerdo", size="sm")
                            not_okay_btn = gr.Button("👎 No según lo esperado", size="sm")
                    feedback_thanks = gr.Markdown("Gracias por los comentarios.", visible=False)
                
                # Input textbox
                with gr.Row():
                    textbox = gr.Textbox(
                        placeholder="Pregúntame cualquier cosa sobre Economía Circular",
                        show_label=False,
                        scale=7,
                        lines=1,
                        interactive=True
                    )
            
            # Right column - Controls and tabs (1/3 width)
            with gr.Column(scale=1, variant="panel"):
                with gr.Tabs() as tabs:
                    
                    # Data Sources Tab
                    with gr.Tab("Fuentes de datos", id=2):
                        with gr.Group(visible=True) as reports_section:
                            dropdown_category = gr.Dropdown(
                                ["Fundamentos y tendencias internacionales de EC", "Financiamiento en EC", "EC en Colombia"],
                                # label="Selecciona país", 
                                label="Especifica tu área de interés",  
                                multiselect=True,
                                value=["Fundamentos y tendencias internacionales de EC", "Financiamiento en EC", "EC en Colombia"],
                                interactive=True,
                            )
                        
                        # # GeoJSON Upload Section
                        # with gr.Group(visible=True) as geojson_section:
                        #     uploaded_file = gr.File(
                        #         label="Subir GeoJson",
                        #         file_types=[".geojson", ".json"],
                        #         file_count="single"
                        #     )
                        #     upload_status = gr.Markdown("", visible=False)
                            
                        #     # Results table for WHISP API response
                        #     results_table = gr.DataFrame(
                        #         label="Resultados del análisis",
                        #         visible=False,
                        #         interactive=False,
                        #         wrap=True,
                        #         elem_classes="dataframe"
                        #     )
                        
                        # Talk to Reports Section
                        
                    
                    # Examples Tab
                    with gr.Tab("Ejemplos", id=0):
                        examples_hidden = gr.Textbox(visible=False)
                        
                        first_key = list(SAMPLE_QUESTIONS.keys())[0]
                        dropdown_samples = gr.Dropdown(
                            SAMPLE_QUESTIONS.keys(),
                            value=first_key,
                            interactive=True,
                            show_label=True,
                            label="Seleccione una categoría de preguntas de muestra."
                        )
                        
                        # Create example sections
                        sample_groups = []
                        for i, (key, questions) in enumerate(SAMPLE_QUESTIONS.items()):
                            examples_visible = (i == 0)
                            with gr.Row(visible=examples_visible) as group_examples:
                                gr.Examples(
                                    questions,
                                    [examples_hidden],
                                    examples_per_page=8,
                                    run_on_click=False,
                                )
                            sample_groups.append(group_examples)
                    
                    # Sources Tab
                    with gr.Tab("Fuentes", id=1, elem_id="sources-textbox"):
                        sources_textbox = gr.HTML(
                            show_label=False,
                            value="Los documentos originales aparecerán aquí después de que hagas una pregunta..."
                        )
    
    # Guidelines Tab
    with gr.Tab("Orientacion"):
        gr.Markdown("""
        #### Welcome to Gina Q&A! 
        
        This AI-powered assistant helps you understand Circular Economy.
        
        ## 💬 How to Ask Effective Questions
        
        | ❌ Less Effective | ✅ More Effective |
        |------------------|-------------------|
        | "What is economy?" | "What are impact of circular economy on businesses?" |
        | "Tell me about compliance" | "What are country guidelines on circular economy" |
        | "Show me data" | "What is the trend on waste and how circular economy is helping in resolving this?" |
        
        ## 🔍 Using Data Sources
        
        **Talk to Reports:** Select reports sections "Trend and fundamentals", "Financing Mechanisms", "Country Resource"
        
        ## ⭐ Best Practices
        
        - Be specific about regions, commodities, or time periods
        - Ask one question at a time for clearer answers
        - Use follow-up questions to explore topics deeper
        - Provide context when possible
        """)
    
    # About Tab
    with gr.Tab("sobre Gina"):
        gr.Markdown("""
        ## About Gina Q&A
        
        The **Circular Economy** places some obligations on the manufacturers and business.
        
        This AI-powered tool helps stakeholders:
        - Understand circular Economy concepts and regulations
        - Assess supply chain issues
        - Navigate complex regulatory landscapes
        
        **Developed by GIZ** for project in Colombia to enhance accessibility and understanding of circular Economy requirements 
        through advanced AI and geographic data processing capabilities.
        
        ### Key Features:
        - Country-specific compliance guidance
        - Real-time question answering with source citations
        - User-friendly interface for complex regulatory information
        """)
    
    # Disclaimer Tab
    with gr.Tab("Disclaimer"):
        gr.Markdown("""
        ## Important Disclaimers
        
        ⚠️ **Scope & Limitations:**
        - This tool is designed for Circular Economy assistance and geographic data analysis
        - Responses should not be considered official legal or compliance advice
        - Always consult qualified professionals for official compliance decisions
        
        ⚠️ **Data & Privacy:**
        - We collect usage statistics to improve the tool. 
        - No personal data is collected when using this tool.
        
        ⚠️ **AI Limitations:**
        - Responses are AI-generated and may contain inaccuracies
        - The tool is a prototype under continuous development
        - Always verify important information with authoritative sources
        
        **Data Collection:** We collect questions, answers, feedback, and anonymized usage statistics 
        to improve tool performance based on legitimate interest in service enhancement.
        
        By using this chatbot, you agree to these terms and acknowledge that you are solely responsible for any reliance on or actions taken based on its responses.

        **Technical Information:** You can find more technical details about the tool in the [Readme](https://huggingface.co/spaces/GIZ/gina/blob/main/README.md).
        
        This tool is a prototype that is still being tested and improved, so it is not perfect and may sometimes give irrelevant answers. If you are not satisfied with an answer, please ask a more specific question or share your feedback to help us improve the system.
        
        """)
    
    # Event Handlers
    
    # Toggle search method
    # search_method.change(
    #     fn=toggle_search_method,
    #     inputs=[search_method],
    #     outputs=[reports_section, dropdown_category]
    # )
    
    # File upload - automatically analyze and display in chat (SIMPLIFIED)
    # uploaded_file.change(
    #     fn=auto_analyze_file,
    #     inputs=[uploaded_file, chatbot],
    #     outputs=[chatbot, sources_textbox],
    #     queue=False
    # )
    
    # Chat functionality
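    # Submit chain: lock the input and append the query, stream the answer and sources,
    # reveal the feedback row, then clear and re-enable the textbox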
    textbox.submit(
        start_chat,
        [textbox, chatbot],
        [textbox, tabs, chatbot],
        queue=False
    ).then(
        chat_response,
        [textbox, chatbot, dropdown_category],
        [chatbot, sources_textbox]
    ).then(
        lambda: gr.update(visible=True),
        outputs=[feedback_row]
    ).then(
        finish_chat,
        outputs=[textbox]
    )
    
    # Examples functionality
    examples_hidden.change(
        start_chat,
        [examples_hidden, chatbot],
        [textbox, tabs, chatbot],
        queue=False
    ).then(
        chat_response,
        [examples_hidden, chatbot, dropdown_category],
        [chatbot, sources_textbox]
    ).then(
        lambda: gr.update(visible=True),
        outputs=[feedback_row]
    ).then(
        finish_chat,
        outputs=[textbox]
    )
    
    # Sample questions dropdown
    dropdown_samples.change(
        change_sample_questions,
        [dropdown_samples],
        sample_groups
    )


    
    # Feedback buttons - handlers with logging
    def handle_feedback(feedback, history):
        """Log feedback for the most recent interaction and show the thank-you note."""
        try:
            if history:
                last_query, last_response = history[-1]
                chat_logger.log(
                    query=last_query,
                    answer=last_response,
                    retrieved_content=[],  # Empty since this is feedback
                    feedback=feedback
                )
        except Exception as e:
            print(f"Feedback logging error: {str(e)}")
        return gr.update(visible=False), gr.update(visible=True)
    
    # Pass the live chat history in as an input; reading chatbot.value inside the
    # callback would only return the component's initial value, not the current history
    okay_btn.click(
        lambda history: handle_feedback("positive", history),
        inputs=[chatbot],
        outputs=[feedback_row, feedback_thanks]
    )
    
    not_okay_btn.click(
        lambda history: handle_feedback("negative", history),
        inputs=[chatbot],
        outputs=[feedback_row, feedback_thanks]
    )

# Launch the app
if __name__ == "__main__":
    demo.launch()