# pylint: disable=line-too-long,missing-module-docstring,missing-class-docstring,missing-function-docstring,broad-exception-caught,unused-variable,too-many-statements,too-many-return-statements,too-many-locals,redefined-builtin,unused-import
# ruff: noqa: F401
import os
import typing
from dataclasses import dataclass, field

import pandas as pd
import requests
import rich
import smolagents
import wikipediaapi
from loguru import logger
from mcp import StdioServerParameters
from smolagents import CodeAgent, DuckDuckGoSearchTool, FinalAnswerTool, Tool, ToolCollection, VisitWebpageTool
from smolagents import InferenceClientModel as HfApiModel

from get_model import get_model
from litellm_model import litellm_model
from openai_model import openai_model

print = rich.get_console().print

DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
SPACE_ID = os.getenv("SPACE_ID", "mikeee/final-assignment")

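# Extra modules the CodeAgent-generated Python code is allowed to import
# (passed to CodeAgent as additional_authorized_imports via BasicAgent below).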
AUTHORIZED_IMPORTS = [
    "requests",
    "zipfile",
    "pandas",
    "numpy",
    "sympy",
    "json",
    "bs4",
    "pubchempy",
    "xml",
    "yahoo_finance",
    "Bio",
    "sklearn",
    "scipy",
    "pydub",
    "PIL",
    "chess",
    "PyPDF2",
    "pptx",
    "torch",
    "datetime",
    "fractions",
    "csv",
    "io",
    "glob",
]


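# WikipediaSearchTool: wraps the wikipediaapi package and returns the first 1000 characters
# of a page summary, or a "not found" message.
# Minimal usage sketch (assumes network access): WikipediaSearchTool().forward("Turing test")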
class WikipediaSearchTool(Tool):
    name = "wikipedia_search"
    description = "Fetches a summary of a Wikipedia page based on a given search query (only one word or group of words)."
    inputs = {
        "query": {"type": "string", "description": "The search term for the Wikipedia page (only one word or group of words)."}
    }
    output_type = "string"

    def __init__(self, lang="en"):
        super().__init__()
        self.wiki = wikipediaapi.Wikipedia(
            language=lang, user_agent="MinimalAgent/1.0")

    def forward(self, query: str):
        page = self.wiki.page(query)
        if not page.exists():
            return "No Wikipedia page found."
        return page.summary[:1000]


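# BasicAgent: a thin dataclass wrapper that builds a smolagents CodeAgent from a model and a
# tool list; calling the instance with a question runs the agent and returns the answer
# (or truncated error text on failure).
# Usage sketch (assumes a valid Hugging Face token for the default InferenceClientModel):
#   agent = BasicAgent(model=HfApiModel(), tools=[FinalAnswerTool()], verbosity_level=1)
#   print(agent("What is 2 + 2?"))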
@dataclass
class BasicAgent:
    model: smolagents.models.Model = HfApiModel()
    tools: list = field(default_factory=lambda: [])
    verbosity_level: int = 0
    additional_authorized_imports: list = field(default_factory=lambda: AUTHORIZED_IMPORTS)
    planning_interval: int = 4

    # def __init__(self):
    def __post_init__(self):
        """Run post_init."""
        logger.debug("BasicAgent initialized.")
        self.agent = CodeAgent(
            tools=self.tools,
            model=self.model,
            verbosity_level=self.verbosity_level,
            additional_authorized_imports=self.additional_authorized_imports,
            planning_interval=self.planning_interval,
        )

    def get_answer(self, question: str):
        return f"ans to {question[:220]}..."

    def __call__(self, question: str) -> str:
        # print(f"Agent received question (first 50 chars): {question[:50]}...")
        # print(f"Agent received question: {question}...")
        # fixed_answer = "This is a default answer."
        # print(f"Agent returning fixed answer: {fixed_answer}")
        # return fixed_answer
        try:
            # answer = self.get_answer(question)
            answer = self.agent.run(question)
        except Exception as e:
            logger.error(e)
            answer = str(e)[:110] + "..."
        return answer


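# main(): fetch the scoring questions, build the model and tool set, run BasicAgent over a
# subset of the questions, and prepare (but not send) the submission payload.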
def main():
    api_url = DEFAULT_API_URL
    questions_url = f"{api_url}/questions"
    submit_url = f"{api_url}/submit"  # noqa

    # username = "mikeee"
    # repo_name = "final-assignment"
    username, _, repo_name = SPACE_ID.partition("/")
    space_id = f"{username}/{repo_name}"
    # In the case of an app running as a Hugging Face Space, this link points toward your codebase (useful for others, so please keep it public).
    agent_code = f"https://huggingface.co/spaces/{space_id}/tree/main"
    print(agent_code)

    # 2. Fetch Questions: fetch before openai_model(), which may set a proxy
    print(f"Fetching questions from: {questions_url}")
    try:
        response = requests.get(questions_url, timeout=120)
        response.raise_for_status()
        questions_data = response.json()
        if not questions_data:
            print("Fetched questions list is empty.")
            return "Fetched questions list is empty or invalid format.", None
        print(f"Fetched {len(questions_data)} questions.")
    except requests.exceptions.JSONDecodeError as e:
        print(f"Error decoding JSON response from questions endpoint: {e}")
        print(f"Response text: {response.text[:500]}")
        return f"Error decoding server response for questions: {e}", None
    except requests.exceptions.RequestException as e:
        print(f"Error fetching questions: {e}")
        return f"Error fetching questions: {e}", None
    except Exception as e:
        print(f"An unexpected error occurred fetching questions: {e}")
        return f"An unexpected error occurred fetching questions: {e}", None

    # model = get_model(cat="gemini")
    _ = (
        "gemini-2.5-flash-preview-04-17",
        # "https://api-proxy.me/gemini/v1beta",
        "https://generativelanguage.googleapis.com/v1beta",
        os.getenv("GEMINI_API_KEY"),
    )
    _ = (
        "grok-3-beta",
        "https://api.x.ai/v1",
        os.getenv("XAI_API_KEY"),
    )
    # model = litellm_model(*_)
    # model = get_model()
    model = openai_model()  # default: llama4 scout
    # messages = [{'role': 'user', 'content': 'Say this is a test.'}]
    # print(model(messages))
    # raise SystemExit("By intention")

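    # Stdio parameters for the mcp-searxng MCP server, launched via npx; SEARXNG_URL selects
    # the SearxNG instance to query (defaults to https://searx.be).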
    mcp_searxng_params = StdioServerParameters(
        **{
            "command": "npx",
            "args": [
                "-y",
                "mcp-searxng",
            ],
            "env": {
                "SEARXNG_URL": os.getenv("SEARXNG_URL", "https://searx.be"),
            },
        }
    )

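    # ToolCollection.from_mcp keeps the MCP server connection open only inside the with-block,
    # so the agent is built and run within it.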
    # with ToolCollection.from_mcp(mcp_searxng_params, trust_remote_code=True) as searxng_tool_collection, ToolCollection.from_mcp(mcp_markitdown_params, trust_remote_code=True) as markitdown_tools:
    with ToolCollection.from_mcp(mcp_searxng_params, trust_remote_code=True) as searxng_tool_collection:
        # 1. Instantiate Agent (modify this part to create your agent)
        try:
            # agent = BasicAgent()
            agent = BasicAgent(
                model=model,
                tools=[
                    *searxng_tool_collection.tools,
                    # DuckDuckGoSearchTool(),
                    VisitWebpageTool(),
                    WikipediaSearchTool(),
                    FinalAnswerTool(),
                ],
                verbosity_level=1,
            )
            agent.agent.visualize()
        except Exception as e:
            print(f"Error instantiating agent: {e}")
            return f"Error initializing agent: {e}", None

        # 3. Run your Agent
        results_log = []
        answers_payload = []
        print(f"Running agent on {len(questions_data)} questions...")
        # for item in questions_data:
        # for item in questions_data[-1:]:
        # for item in questions_data[14:15]:
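        # NOTE: only the last six questions are run below; restore "for item in questions_data:" for a full run.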
        for item in questions_data[-6:]:
            task_id = item.get("task_id")
            question_text = item.get("question")
            if not task_id or question_text is None:
                print(f"Skipping item with missing task_id or question: {item}")
                continue
            try:
                submitted_answer = agent(question_text)
                answers_payload.append({"task_id": task_id, "submitted_answer": submitted_answer})
                results_log.append({"Task ID": task_id, "Question": question_text, "Submitted Answer": submitted_answer})
            except Exception as e:
                print(f"Error running agent on task {task_id}: {e}")
                results_log.append({"Task ID": task_id, "Question": question_text, "Submitted Answer": f"AGENT ERROR: {e}"})

        if not answers_payload:
            print("Agent did not produce any answers to submit.")
            return "Agent did not produce any answers to submit.", pd.DataFrame(results_log)

        # 4. Prepare Submission
        submission_data = {"username": username.strip(), "agent_code": agent_code, "answers": answers_payload}  # noqa
        status_update = f"Agent finished. Submitting {len(answers_payload)} answers for user '{username}'..."
        print(status_update)
        print(answers_payload)
        agent.agent.visualize()

        return None, None


if __name__ == "__main__":
    main()