# app.py
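"""Gradio entry point for the AI Research Assistant Space.

Pipeline (see ResearchOrchestrator.run): process the query, retrieve sources
with the Tavily-backed Retriever, analyze them with the model served from the
configured Hugging Face Inference Endpoint, attach citations, and stream the
formatted Markdown result back to the UI.
"""
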
import logging
import os

import gradio as gr

from modules.input_handler import InputHandler
from modules.retriever import Retriever
from modules.analyzer import Analyzer
from modules.citation import CitationManager
from modules.formatter import OutputFormatter

# Configure logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

class ResearchOrchestrator:
    def __init__(self, input_handler, retriever, analyzer, citation_manager, formatter):
        self.input_handler = input_handler
        self.retriever = retriever
        self.analyzer = analyzer
        self.citation_manager = citation_manager
        self.formatter = formatter

    def run(self, query, progress=gr.Progress()):
        """Execute the research pipeline with streaming updates."""
        try:
            progress(0.0, desc="Starting research...")
            logging.info(f"Starting research for query: {query}")

            # Step 1: Process input
            progress(0.1, desc="🔍 Processing your query...")
            processed_query = self.input_handler.process_query(query)
            logging.info("Query processed successfully")

            # Step 2: Retrieve data
            progress(0.3, desc="🌐 Searching for relevant information...")
            search_results = self.retriever.search(processed_query)
            if not search_results:
                yield "⚠️ No relevant information found for your query. Please try rephrasing."
                logging.warning("No search results found")
                return
            logging.info(f"Retrieved {len(search_results)} results")

            # Step 3: Analyze content
            progress(0.5, desc="🧠 Analyzing search results...")
            yield "🧠 Analyzing search results...\n\n⏳ The AI model may be initializing. This could take a few minutes if it's the first request..."
            analysis = self.analyzer.analyze(query, search_results)
            logging.info("Analysis completed")

            # Step 4: Manage citations
            progress(0.8, desc="📎 Adding citations...")
            cited_analysis = self.citation_manager.add_citations(analysis, search_results)
            logging.info("Citations added")

            # Step 5: Format output
            progress(0.9, desc="✨ Formatting response...")
            formatted_output = self.formatter.format_response(cited_analysis, search_results)
            logging.info("Response formatted successfully")

            # Add completion notification
            progress(1.0, desc="✅ Research complete!")
            if len(search_results) >= 3:
                completion_message = "\n\n---\n[ANALYSIS COMPLETE] ✅ Research finished with sufficient sources."
            else:
                completion_message = "\n\n---\n[RECOMMEND FURTHER ANALYSIS] ⚠️ Limited sources found. Consider refining your query."

            yield formatted_output + completion_message
        except Exception as e:
            error_msg = f"❌ An error occurred: {str(e)}"
            logging.error(f"Error in research pipeline: {str(e)}", exc_info=True)
            yield error_msg

# Configuration
CONFIG = {
    "hf_api_base": "https://zxzbfrlg3ssrk7d9.us-east-1.aws.endpoints.huggingface.cloud/v1/",
    "hf_api_key": os.getenv("HF_TOKEN"),
    "tavily_api_key": os.getenv("TAVILY_API_KEY"),
}
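# NOTE: both keys are read from environment variables (e.g. Space secrets or a
# local export); initialize_modules() below fails fast if either is missing.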

# Initialize modules with error handling
def initialize_modules():
    """Initialize all modules with proper error handling."""
    try:
        if not CONFIG["tavily_api_key"]:
            raise ValueError("TAVILY_API_KEY environment variable is not set")
        if not CONFIG["hf_api_key"]:
            raise ValueError("HF_TOKEN environment variable is not set")

        input_handler = InputHandler()
        retriever = Retriever(api_key=CONFIG["tavily_api_key"])
        analyzer = Analyzer(base_url=CONFIG["hf_api_base"], api_key=CONFIG["hf_api_key"])
        citation_manager = CitationManager()
        formatter = OutputFormatter()

        return ResearchOrchestrator(
            input_handler,
            retriever,
            analyzer,
            citation_manager,
            formatter
        )
    except Exception as e:
        logging.error(f"Failed to initialize modules: {str(e)}")
        raise

# Initialize orchestrator
orchestrator = initialize_modules()

# Custom CSS for spinner
custom_css = """
.spinner {
    border: 4px solid #f3f3f3;
    border-top: 4px solid #3498db;
    border-radius: 50%;
    width: 24px;
    height: 24px;
    animation: spin 1s linear infinite;
    display: inline-block;
    margin-right: 8px;
}

@keyframes spin {
    0% { transform: rotate(0deg); }
    100% { transform: rotate(360deg); }
}
"""

def research_assistant(query, progress=gr.Progress()):
    """Main entry point for the research assistant with streaming."""
    logging.info(f"Research assistant called with query: {query}")
    yield from orchestrator.run(query, progress)

# Create Gradio interface
with gr.Blocks(css=custom_css, title="Research Assistant") as demo:
    gr.Markdown("# 🧠 AI Research Assistant")
    gr.Markdown("Enter a research topic to get a structured analysis with sources")

    with gr.Row():
        with gr.Column():
            query_input = gr.Textbox(
                label="Research Query",
                placeholder="Enter your research question...",
                lines=3
            )
            submit_btn = gr.Button("Research", variant="primary")
        with gr.Column():
            output = gr.Markdown(label="Analysis Results")

    # Status indicator with spinner
    status_indicator = gr.HTML("<div id='status'><span class='spinner'></span> Ready for your research query</div>")

    examples = gr.Examples(
        examples=[
            "Latest advancements in quantum computing",
            "Impact of climate change on global agriculture",
            "Recent developments in Alzheimer's treatment research"
        ],
        inputs=query_input
    )

    submit_btn.click(
        fn=research_assistant,
        inputs=query_input,
        outputs=output
    )
    query_input.submit(
        fn=research_assistant,
        inputs=query_input,
        outputs=output
    )

if __name__ == "__main__":
    demo.launch()
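
# To run locally (assuming the modules/ package is importable and both keys are
# set, e.g. `export HF_TOKEN=... TAVILY_API_KEY=...`):
#     python app.py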