Spaces:
Sleeping
Sleeping
| import os | |
| import logging | |
| import sys | |
| import time | |
| import smolagents | |
# Module-level logger; messages may be swallowed depending on the host's
# logging configuration (see note in BasicAgent.__init__).
LOG = logging.getLogger(__name__)

# System prompt prepended to every question in BasicAgent.__call__.
# NOTE: this literal is sent verbatim to the model — do not reformat it.
SYSTEM_PROMPT = """
You are a general AI assistant. I will ask you a question. Report your thoughts, and finish your answer with the following template: FINAL ANSWER: [YOUR FINAL ANSWER]. YOUR FINAL ANSWER should be a number OR as few words as possible OR a comma separated list of numbers and/or strings. If you are asked for a number, don't use comma to write your number neither use units such as $ or percent sign unless specified otherwise. If you are asked for a string, don't use articles, neither abbreviations (e.g. for cities), and write the digits in plain text unless specified otherwise. If you are asked for a comma separated list, apply the above rules depending of whether the element to be put in the list is a number or a string.
The current date is April 30, 2025.
Take the time to plan the steps to reach the solution. Show the steps and then execute the steps.
"""
class BasicAgent:
    """Multi-agent question answerer built on smolagents.

    A manager CodeAgent delegates to two managed agents: a web-search agent
    (DuckDuckGo + webpage visits) and a Wikipedia-search agent.  The backing
    LLM is selected by ``model_id``:

    * ``"google"`` -- Gemini 2.0 Flash via Google's OpenAI-compatible
      endpoint; requires the ``GOOGLE_API_KEY`` environment variable
      (raises ``KeyError`` if unset).
    * ``"local"``  -- ``Qwen/Qwen3-4B-FP8`` run locally via
      ``TransformersModel``.
    * anything else (including ``None``) -- ``Qwen/Qwen3-32B`` hosted via
      ``HfApiModel``.
    """

    def __init__(self, model_id=None):
        """Build the model and the three-agent hierarchy.

        Args:
            model_id: ``"google"``, ``"local"`` (case-insensitive), or any
                other value / ``None`` for the default hosted model.
        """
        print("BasicAgent initializing.")
        # Logs appear to be swallowed by the hosting environment; the
        # print() calls below are the reliable trace channel.
        LOG.warning("logging BasicAgent initialized.")
        self.model = None
        if model_id and model_id.lower() == "google":
            self.model_id = "google"
            # Use the Google Gemini free tier through its OpenAI-compatible
            # API surface.  KeyError here means GOOGLE_API_KEY is unset.
            gem_key = os.environ["GOOGLE_API_KEY"]
            self.model = smolagents.OpenAIServerModel(
                model_id="gemini-2.0-flash",
                api_base="https://generativelanguage.googleapis.com/v1beta/openai/",
                api_key=gem_key,
                temperature=0.3)
        elif model_id and model_id.lower() == "local":
            self.model_id = "Qwen/Qwen3-4B-FP8"
            # Run the model locally.
            self.model = smolagents.TransformersModel(
                model_id=self.model_id,
                max_new_tokens=32000,
                temperature=0.3,
            )
        else:
            # BUG FIX: the original only assigned self.model_id inside an
            # ``if model_id:`` guard, so BasicAgent() with the default
            # model_id=None raised AttributeError later.  Any unrecognized
            # model_id (or None) now falls through to the hosted default,
            # matching the original behavior for non-google/non-local ids.
            self.model_id = "Qwen/Qwen3-32B"
        if not self.model:
            self.model = smolagents.HfApiModel(
                max_tokens=32000,
                temperature=0.3,
                model_id=self.model_id,
                custom_role_conversions=None,
            )
        print(f"NEW2: BasicAgent {self.model_id=} {self.model=}")

        print("BasicAgent making search tool.")
        web_search_tools = [
            smolagents.DuckDuckGoSearchTool(),
            smolagents.VisitWebpageTool(),
            smolagents.FinalAnswerTool(),
        ]
        self.web_search_agent = smolagents.CodeAgent(
            name="web_search_agent",
            description="Search the web",
            model=self.model,
            tools=web_search_tools,
            max_steps=6,
            verbosity_level=2,
            planning_interval=None,
            additional_authorized_imports=["duckduckgo_search"],
        )

        print("BasicAgent making wikipedia search tool.")
        wiki_search_tools = [
            smolagents.WikipediaSearchTool(),
            smolagents.VisitWebpageTool(),
            smolagents.FinalAnswerTool(),
        ]
        self.wiki_search_agent = smolagents.CodeAgent(
            name="wikipedia_search_agent",
            description="Search wikipedia",
            model=self.model,
            tools=wiki_search_tools,
            max_steps=6,
            verbosity_level=2,
            planning_interval=None,
            additional_authorized_imports=["wikipedia-api"],
        )

        print("BasicAgent making manager.")
        self.manager_agent = smolagents.CodeAgent(
            name="manager_agent",
            # BUG FIX: typo "Manger" -> "Manager" (this string is fed to
            # the LLM as the agent's self-description).
            description="Manager of other agents",
            tools=[smolagents.FinalAnswerTool()],
            model=self.model,
            max_steps=10,
            verbosity_level=2,
            planning_interval=None,
            additional_authorized_imports=["duckduckgo_search", "wikipedia-api"],
            managed_agents=[self.web_search_agent, self.wiki_search_agent])

    def __call__(self, question: str) -> str:
        """Answer *question* via the manager agent and return the answer.

        The SYSTEM_PROMPT is prepended to the question before dispatch.
        """
        # The Gemini free tier is aggressively rate limited; pause between
        # calls to avoid 429 responses.
        if self.model_id == "google":
            time.sleep(1)
        print(f"NEW Agent received question (first 50 chars): {question[:50]}...")
        prompt = f"{SYSTEM_PROMPT}\n\n{question}"
        answer = self.manager_agent.run(prompt)
        print(f"NEW {answer=}")
        # run() can return non-string payloads (e.g. a bare number); coerce
        # so the declared -> str contract holds for callers.
        return str(answer)