import os

import gradio as gr

from scripts.router_simple import build_router_chain

OPENAI_KEY = os.getenv("OPENAI_API_KEY", None)
MODEL_NAME = os.getenv("OPENAI_MODEL", "gpt-4o-mini")

if not OPENAI_KEY:
    print("WARNING: OPENAI_API_KEY not set. The app may fail at runtime.")

# Build the router once (keeps vectorstore & models in memory)
router = build_router_chain(model_name=MODEL_NAME)


def chat_fn(message, history):
    if not message:
        return history, ""
    # call router
    result = router.invoke({"input": message})
    # RetrievalQA returns dict with 'result' key (and maybe 'source_documents')
    answer = result.get("result") if isinstance(result, dict) else str(result)
    # append sources if present
    sources = None
    if isinstance(result, dict) and "source_documents" in result and result["source_documents"]:
        try:
            sources = list({str(d.metadata.get("source", "unknown")) for d in result["source_documents"]})
        except Exception:
            sources = None
    if sources:
| answer = f"{answer}\n\nπ Sources: {', '.join(sources)}" | |
    history.append((message, answer))
    return history, ""


with gr.Blocks() as demo:
| gr.Markdown("## π Course Assistant β Chat with your course files") | |
    chatbot = gr.Chatbot(elem_id="chatbot")
    txt = gr.Textbox(show_label=False, placeholder="Ask about the course...")
    txt.submit(chat_fn, [txt, chatbot], [chatbot, txt])
    txt.submit(lambda: None, None, txt)  # clear input

if __name__ == "__main__":
    demo.launch(server_port=int(os.getenv("PORT", 7860)))
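
The app imports build_router_chain from scripts/router_simple.py, which is not shown here. Below is a minimal sketch of what that module could look like, assuming it wraps a legacy LangChain RetrievalQA chain over a pre-built local Chroma vector store. The CHROMA_DIR path, the k=4 retriever setting, and the input_key="input" override are assumptions chosen only so the sketch matches how app.py calls router.invoke({"input": ...}) and reads the 'result' and 'source_documents' keys; the real router may be organized differently.

# scripts/router_simple.py -- hypothetical sketch, not the actual module shipped with the Space.
import os

from langchain.chains import RetrievalQA
from langchain_openai import ChatOpenAI, OpenAIEmbeddings
from langchain_community.vectorstores import Chroma

# Assumed location of a vector store built in a separate ingestion step.
CHROMA_DIR = os.getenv("CHROMA_DIR", "chroma_db")


def build_router_chain(model_name: str = "gpt-4o-mini"):
    """Return a chain whose .invoke({"input": ...}) yields a dict with
    'result' and 'source_documents', as app.py expects."""
    embeddings = OpenAIEmbeddings()
    vectorstore = Chroma(persist_directory=CHROMA_DIR, embedding_function=embeddings)
    llm = ChatOpenAI(model=model_name, temperature=0)
    return RetrievalQA.from_chain_type(
        llm=llm,
        chain_type="stuff",
        retriever=vectorstore.as_retriever(search_kwargs={"k": 4}),
        return_source_documents=True,  # lets the UI list source files
        input_key="input",             # app.py passes {"input": message} instead of the default "query"
    )

With OPENAI_API_KEY set and the vector store already built, running `python app.py` starts the Gradio chat UI on port 7860 (or whatever $PORT specifies).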