""" |
|
|
app.py — robust multimodel agent |
|
|
-------------------------------- |
|
|
* Supports **OpenAI** (if `OPENAI_API_KEY`) or **Gemini** (if `GOOGLE_API_KEY`) via |
|
|
`LiteLLMModel`. |
|
|
* Otherwise falls back to a free HF Inference chat model |
|
|
(`microsoft/Phi-3-mini-4k-instruct`). |
|
|
* No version‑specific imports (avoids `OpenAIChat` errors). |
|
|
""" |
|
|
import os
import pathlib

import gradio as gr
from mcp import StdioServerParameters
from smolagents import MCPClient, CodeAgent, InferenceClientModel, LiteLLMModel

SERVER_PATH = pathlib.Path(__file__).with_name("mcp_server.py")

OPENAI_KEY = os.getenv("OPENAI_API_KEY")
GEMINI_KEY = os.getenv("GOOGLE_API_KEY")
HF_MODEL_ID = os.getenv("HF_MODEL_ID", "microsoft/Phi-3-mini-4k-instruct")
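
# Prefer OpenAI when its key is set, then Gemini, then the free HF Inference
# fallback. The LiteLLM model ids below are reasonable defaults, not the only
# options; substitute any model your key actually has access to.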
if OPENAI_KEY:
    BASE_MODEL = LiteLLMModel(model_id="openai/gpt-4o", api_key=OPENAI_KEY)
    MODEL_NAME = "openai/gpt-4o"
elif GEMINI_KEY:
    BASE_MODEL = LiteLLMModel(model_id="gemini/gemini-pro", api_key=GEMINI_KEY)
    MODEL_NAME = "gemini/gemini-pro"
else:
    BASE_MODEL = InferenceClientModel(model_id=HF_MODEL_ID, timeout=90)
    MODEL_NAME = HF_MODEL_ID


def respond(message: str, history: list):
    """Run one agent turn against the MCP server's tools and return the
    updated history twice: once for the Chatbot, once for the State."""
    params = StdioServerParameters(command="python", args=[str(SERVER_PATH)])
    # The MCP client launches mcp_server.py over stdio and exposes its tools.
    with MCPClient(params) as tools:
        agent = CodeAgent(tools=tools, model=BASE_MODEL)
        answer = agent.run(message)
    history += [
        {"role": "user", "content": message},
        {"role": "assistant", "content": answer},
    ]
    return history, history
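
# Note: each call spawns a fresh MCP server subprocess and shuts it down when
# the `with` block exits. That keeps the handler stateless at the cost of
# per-request startup latency; reusing a single long-lived MCPClient would be
# a possible optimization.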
with gr.Blocks(title="Enterprise SQL Agent") as demo:
    state = gr.State([])
    gr.Markdown("## Enterprise SQL Agent — natural‑language to SQL via MCP")
    chat = gr.Chatbot(type="messages", label="Chat")
    box = gr.Textbox(show_label=False, placeholder="Ask: Who are my inactive Northeast customers?")
    box.submit(respond, [box, state], [chat, state])
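    # respond returns (history, history): the first output refreshes the
    # visible chat, the second persists the running message list in gr.State.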
    with gr.Accordion("Example prompts"):
        gr.Markdown(
            """* Who are my **Northeast** customers with no orders in 6 months?
* List customers sorted by **LastOrderDate**.
* Draft re‑engagement emails for inactive accounts."""
        )

    gr.Markdown(f"_Powered by MCP + smolagents • Model: **{MODEL_NAME}**_")

|
if __name__ == "__main__":
    demo.launch()
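
# Run locally with `python app.py` (assumes mcp_server.py sits next to this
# file); Gradio serves the UI on http://127.0.0.1:7860 by default.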
|
|
|