```python
import gradio as gr
import os

from smolagents import CodeAgent, HfApiModel, LiteLLMModel

from retriever import load_guest_dataset
from tools import DuckDuckGoSearchTool, WeatherInfoTool, HubStatsTool

# Use the Hugging Face Inference API when running on a Space,
# otherwise fall back to a local Ollama model served through LiteLLM.
is_running_on_space = os.environ.get("SPACE_ID") is not None

if is_running_on_space:
    model = HfApiModel()
else:
    model = LiteLLMModel(
        model_id="ollama_chat/gemma3",
        api_base="http://127.0.0.1:11434",
        num_ctx=8192,
    )

# Initialize the guest info retriever tool
guest_info_tool = load_guest_dataset()

# Initialize the web search tool
search_tool = DuckDuckGoSearchTool()

# Initialize the weather tool
weather_info_tool = WeatherInfoTool()

# Initialize the Hub stats tool
hub_stats_tool = HubStatsTool()

# Create Alfred, our gala agent, with all of the tools above
alfred = CodeAgent(
    tools=[guest_info_tool, hub_stats_tool, weather_info_tool, search_tool],
    model=model,
    add_base_tools=True,      # include smolagents' default tools
    planning_interval=3,      # re-plan every 3 steps
)


def query_guest(guest_name):
    return alfred.run(f"Tell me about our guest named '{guest_name}'.")


def ai_builder(builder):
    return alfred.run(f"What is {builder}'s most downloaded model on the Hugging Face Hub?")


def query(query):
    return alfred.run(query)


demo = gr.Interface(fn=query, inputs="text", outputs="text")
demo.launch()
```
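The custom tools imported from `tools.py` (and the guest retriever from `retriever.py`) are not shown in this listing. As a rough sketch only, a custom smolagents tool such as `WeatherInfoTool` can be defined by subclassing `Tool`; the `location` input and the dummy weather logic below are illustrative assumptions, not the exact implementation used above:

```python
import random

from smolagents import Tool


class WeatherInfoTool(Tool):
    # Metadata the agent reads to decide when and how to call this tool
    name = "weather_info"
    description = "Fetches dummy weather information for a given location."
    inputs = {
        "location": {
            "type": "string",
            "description": "The location to get weather information for.",
        }
    }
    output_type = "string"

    def forward(self, location: str) -> str:
        # Placeholder logic: return a random condition instead of calling a real weather API
        conditions = ["Rainy, 15°C", "Clear, 25°C", "Windy, 20°C"]
        return f"Weather in {location}: {random.choice(conditions)}"
```

`DuckDuckGoSearchTool` also ships with smolagents itself, so `tools.py` may simply re-export it; the guest info tool returned by `load_guest_dataset()` would follow the same `Tool` subclass pattern, searching the guest dataset inside its `forward` method.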