File size: 2,929 Bytes
bc2210a
0b1e28b
914c29e
8415ab1
914c29e
 
bc2210a
bef0dce
bc2210a
 
 
 
 
bef0dce
bc2210a
 
370dfae
914c29e
 
370dfae
914c29e
 
 
 
 
 
4cb605b
914c29e
af212bc
4cb605b
af212bc
 
 
 
914c29e
 
 
 
 
 
 
1c60b2b
0b1e28b
 
bc2210a
 
 
0b1e28b
7813102
0b1e28b
bc2210a
 
416f072
 
 
0b1e28b
 
 
416f072
 
 
62e00df
bef0dce
 
5aa076e
34f4dd8
bc2210a
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
# --- Gaia Agent Definition ---
from smolagents import CodeAgent, DuckDuckGoSearchTool, VisitWebpageTool
from smolagents import LiteLLMModel, InferenceClientModel
import os
from smolagents import HfApiModel
#from smolagents import OpenAIServerModel  # uncomment in case of using the OpenAI API or Hugging Face API

# ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
# (Keep Constants as is)
# --- Constants ---
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"

class GaiaAgent:
    """GAIA benchmark question-answering agent.

    Wraps a smolagents ``CodeAgent`` equipped with web-search and
    webpage-visiting tools. The backing LLM is chosen at construction
    time from the environment:

    * no ``SPACE_ID`` env var (local development) -> a local Ollama model
      served via LiteLLM;
    * ``SPACE_ID`` set (running in a Hugging Face Space) -> the Hugging
      Face Inference API.
    """

    def __init__(self):
        """Build the model, the tools, and the CodeAgent, then extend the
        agent's system prompt with the GAIA answer-format instructions."""
        print("GaiaAgent initialized.")

        if not os.getenv("SPACE_ID"):
            # Local run: talk to an Ollama server on its default port.
            model_id = "ollama/qwen2.5:7b"
            self.model = LiteLLMModel(
                model_id=model_id,
                api_base="http://localhost:11434",
            )
        else:
            # Deployed in a Hugging Face Space: use the HF Inference API.
            # (To use the OpenAI API instead, swap in OpenAIServerModel
            # with api_base="https://api.openai.com/v1" and OPENAI_API_KEY.)
            model_id = "meta-llama/Llama-4-Scout-17B-16E-Instruct"
            self.model = HfApiModel(
                model_id=model_id,
                provider="novita",
                temperature=0.0,
                api_key=os.getenv("HUGGINGFACEHUB_API_TOKEN"),
            )

        # Web tools available to the agent's generated code.
        search = DuckDuckGoSearchTool()
        web_browser = VisitWebpageTool()

        self.agent = CodeAgent(
            model=self.model,
            tools=[search, web_browser],
            add_base_tools=True,
            additional_authorized_imports=["pandas", "re"],
        )

        # GAIA scoring does exact-match on the text after "FINAL ANSWER:",
        # so the formatting rules below are appended to the default prompt.
        system_prompt = """You are a general AI assistant. I will ask you a question. Report your thoughts, and
finish your answer with the following template: FINAL ANSWER: [YOUR FINAL ANSWER].
YOUR FINAL ANSWER should be a number OR as few words as possible OR a comma separated
list of numbers and/or strings. If you are asked for a number, don’t use comma to write your number neither use units such as $ or
percent sign unless specified otherwise. If you are asked for a string, don’t use articles, neither abbreviations (e.g. for cities), and write the
digits in plain text unless specified otherwise. If you are asked for a comma separated list, apply the above rules depending of whether the element
to be put in the list is a number or a string."""

        self.agent.prompt_templates["system_prompt"] = self.agent.prompt_templates["system_prompt"] + system_prompt

    def __call__(self, question: str) -> str:
        """Run the agent on ``question`` and return its final answer.

        NOTE(review): ``CodeAgent.run`` may return a non-str type
        (e.g. a number) despite the annotation — confirm against callers.
        """
        print(f"Agent received question (first 50 chars): {question[:50]}...")
        answer = self.agent.run(question)
        print(f"Agent returning answer: {answer}")
        return answer