# myspace134v/app.py
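"""Gradio entry point for an AI research assistant Space.

The app enriches a user query with web search results plus weather and
space-weather context, analyzes it with an LLM, and returns a cited,
Markdown-formatted summary.
"""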
import gradio as gr
from modules.input_handler import validate_input
from modules.retriever import perform_search
from modules.context_enhancer import add_weather_context, add_space_weather_context
from modules.analyzer import analyze_with_model
from modules.formatter import format_output
from modules.citation import generate_citations
from modules.visualizer import render_output
from modules.server_cache import get_cached_result, cache_result
from modules.status_logger import log_request
def research_assistant(query):
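    """Run the full research pipeline for a single query.

    Cached results are returned immediately; otherwise the query is
    validated, enriched with weather/space-weather context and web search
    results, analyzed by the model, then formatted, cited, rendered, and
    cached before being returned as Markdown.
    """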
    log_request("Research started", query=query)

    # Check cache first
    cached = get_cached_result(query)
    if cached:
        log_request("Cache hit", query=query)
        return cached

    # Input validation
    validated_query = validate_input(query)

    # Context enhancement
    weather_data = add_weather_context()
    space_weather_data = add_space_weather_context()

    # Web search
    search_results = perform_search(validated_query)

    # Combine context
    enriched_input = (
        f"{validated_query}\n\n"
        f"Weather: {weather_data}\n"
        f"Space Weather: {space_weather_data}\n\n"
        f"Search Results:\n{search_results}"
    )

    # LLM Analysis
    analysis = analyze_with_model(enriched_input)

    # Formatting and citations
    formatted_output = format_output(analysis)
    citations = generate_citations(search_results)

    # Final output
    final_output = render_output(formatted_output, citations)

    # Cache result
    cache_result(query, final_output)
    log_request("Research completed", result_length=len(final_output))
    return final_output

# Gradio Interface
demo = gr.Interface(
    fn=research_assistant,
    inputs=gr.Textbox(label="Enter your research question"),
    outputs=gr.Markdown(label="Research Summary"),
    title="AI Research Assistant",
    description="An AI-powered research assistant that gathers and analyzes information with web search, weather, and space weather context.",
    allow_flagging="never",
)

if __name__ == "__main__":
    demo.launch()