# AI_SQL / app.py — Hugging Face Space entry point (rev 3b69291)
"""
Gradio front-end + smolagents CodeAgent
---------------------------------------
β€’ If you set an OPENAI_API_KEY the agent will call OpenAI (GPT-4o by default).
β€’ Otherwise it falls back to a free Hugging Face chat-completion model
(defaults to microsoft/Phi-3-mini-4k-instruct which the public Inference
API exposes for the Chat Completion task).
β€’ You can override the fallback by defining HF_MODEL_ID and, if needed,
HF_API_TOKEN in the Space β†’ Settings β†’ Secrets.
"""
import os, pathlib, gradio as gr
from mcp import StdioServerParameters
from smolagents import MCPClient, CodeAgent, InferenceClientModel
# Path to the MCP tool server script, resolved relative to this file so the
# Space works regardless of the working directory it is launched from.
SERVER_PATH = pathlib.Path(__file__).with_name("mcp_server.py")

# Decide which base model to use: OpenAI when a key is configured, otherwise
# the Hugging Face Inference API.
OPENAI_KEY = os.getenv("OPENAI_API_KEY")
HF_MODEL_ID = os.getenv("HF_MODEL_ID", "microsoft/Phi-3-mini-4k-instruct")

if OPENAI_KEY:  # --- OpenAI branch -------------
    # Current smolagents exposes the OpenAI-compatible wrapper as
    # OpenAIServerModel (there is no `smolagents.models.OpenAIChat`).
    from smolagents import OpenAIServerModel

    BASE_MODEL = OpenAIServerModel(
        # "gpt-4o-preview" is not a valid OpenAI model id; default to "gpt-4o".
        model_id=os.getenv("OPENAI_MODEL", "gpt-4o"),
        api_key=OPENAI_KEY,
        temperature=0.3,  # forwarded to the chat-completion call
    )
else:  # --- Hugging Face branch ----
    BASE_MODEL = InferenceClientModel(  # uses HF Inference API
        model_id=HF_MODEL_ID,
        token=os.getenv("HF_API_TOKEN"),  # optional; only needed for gated repos
        timeout=90,  # seconds; free-tier cold starts can be slow
    )
# ----------------- callback ---------------------------------------------------
def respond(message: str, history: list):
    """Send user prompt → CodeAgent → SQL tools → natural-language answer.

    Args:
        message: The user's prompt from the textbox.
        history: Chat transcript held in ``gr.State`` — a list of
            ``{"role": ..., "content": ...}`` message dicts.

    Returns:
        A ``(chat, state)`` pair: the updated transcript twice, once for the
        Chatbot component and once for the State component.
    """
    params = StdioServerParameters(command="python", args=[str(SERVER_PATH)])
    # A fresh agent per request keeps the MCP subprocess's lifetime scoped to
    # this call; the context manager shuts the tool server down afterwards.
    with MCPClient(params) as tools:
        agent = CodeAgent(tools=tools, model=BASE_MODEL)
        answer = agent.run(message)
    # Build a NEW list rather than mutating `history` in place: mutating a
    # gr.State value directly can leave Gradio holding a stale reference.
    updated = history + [
        {"role": "user", "content": message},
        {"role": "assistant", "content": answer},
    ]
    return updated, updated
# ----------------- UI ---------------------------------------------------------
with gr.Blocks(title="Enterprise SQL Agent") as demo:
    state = gr.State([])  # chat transcript shared across turns
    # NOTE(review): the original header/footer strings were mojibake
    # (UTF-8 read as cp1252); restored best-effort to the intended glyphs.
    gr.Markdown("## Enterprise SQL Agent — ask questions about your data 💁➡️📊")
    chat = gr.Chatbot(type="messages", label="Chat")
    box = gr.Textbox(
        placeholder="e.g. Who are my inactive Northeast customers?",
        show_label=False,
    )
    # Submit wires the textbox through `respond`; outputs refresh both the
    # visible chat and the stored transcript.
    box.submit(respond, [box, state], [chat, state])
    with gr.Accordion("Example prompts"):
        gr.Markdown(
            "* Who are my **Northeast** customers with no orders in 6 months?\n"
            "* List customers sorted by **LastOrderDate**.\n"
            "* Draft re-engagement emails for inactive accounts."
        )
    footer = (
        f"_Powered by MCP + smolagents + Gradio • Model: "
        f"{'OpenAI' if OPENAI_KEY else HF_MODEL_ID}_"
    )
    gr.Markdown(footer)

if __name__ == "__main__":
    demo.launch()