# AgriCopilot AI backend — FastAPI service exposing multilingual chat,
# disaster summarization, marketplace, vector search and crop-doctor endpoints.
import hmac
import io
import logging
import os

from fastapi import FastAPI, Request, Header, HTTPException, UploadFile, File
from fastapi.responses import JSONResponse
from PIL import Image
from pydantic import BaseModel
from transformers import pipeline

from smebuilder_vector import query_vector
# ==============================
# Logging Setup
# ==============================
# Root logger at INFO; all application messages go through the named
# "AgriCopilot" logger below.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("AgriCopilot")
# ==============================
# App Initialization
# ==============================
app = FastAPI(title="AgriCopilot AI API", version="2.0")
@app.get("/")
async def root():
    """Health-check endpoint confirming the API process is up."""
    # Status message text is part of the public contract — kept byte-identical.
    return {"status": "AgriCopilot AI Backend is running smoothly ✅"}
# ==============================
# AUTH CONFIGURATION
# ==============================
PROJECT_API_KEY = os.getenv("PROJECT_API_KEY", "agricopilot404")
def check_auth(authorization: str | None):
if not PROJECT_API_KEY:
return
if not authorization or not authorization.startswith("Bearer "):
raise HTTPException(status_code=401, detail="Missing bearer token")
token = authorization.split(" ", 1)[1]
if token != PROJECT_API_KEY:
raise HTTPException(status_code=403, detail="Invalid token")
# ==============================
# Exception Handling
# ==============================
@app.exception_handler(Exception)
async def global_exception_handler(request: Request, exc: Exception):
    """Last-resort handler: log the full traceback, return a 500 JSON body."""
    # logger.exception records the stack trace (logger.error dropped it);
    # lazy %-args avoid formatting when the level is filtered.
    logger.exception("Unhandled error: %s", exc)
    # NOTE(review): echoing str(exc) can leak internals to clients — consider
    # a generic message in production.
    return JSONResponse(status_code=500, content={"error": str(exc)})
# ==============================
# Request Models
# ==============================
class ChatRequest(BaseModel):
    """Request body for POST /multilingual-chat."""
    # Free-text user question passed straight to the chat model.
    query: str
class DisasterRequest(BaseModel):
    """Request body for POST /disaster-summarizer."""
    # Free-text disaster report to be summarized by the model.
    report: str
class MarketRequest(BaseModel):
    """Request body for POST /marketplace."""
    # Product name/description fed to the recommendation model.
    product: str
class VectorRequest(BaseModel):
    """Request body for POST /vector-search."""
    # Search text passed to query_vector().
    query: str
# ==============================
# Load Hugging Face Pipelines
# ==============================
HF_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")
if not HF_TOKEN:
    logger.warning("⚠️ No Hugging Face token found. Gated models may fail.")
else:
    logger.info("✅ Hugging Face token loaded successfully.")

# General text-generation model for chat, disaster, market endpoints
default_model = "meta-llama/Llama-3.1-8B-Instruct"
vision_model = "meta-llama/Llama-3.2-11B-Vision-Instruct"

# All three text endpoints use the same model, so load ONE pipeline and alias
# it — the original created three identical pipelines, loading three copies of
# an 8B-parameter model for no behavioral difference. Guarded like crop_pipe
# below so a gated/unreachable model degrades the service instead of crashing
# startup (run_conversational's except turns a None pipe into a model-error
# reply).
try:
    _text_pipe = pipeline("text-generation", model=default_model, token=HF_TOKEN)
except Exception as e:
    logger.warning(f"Text model load failed: {e}")
    _text_pipe = None
chat_pipe = disaster_pipe = market_pipe = _text_pipe

# Multimodal crop diagnostic model
try:
    crop_pipe = pipeline("image-text-to-text", model=vision_model, token=HF_TOKEN)
except Exception as e:
    logger.warning(f"Crop model load failed: {e}")
    crop_pipe = None
# ==============================
# Helper Functions
# ==============================
def run_conversational(pipe, prompt: str, max_new_tokens: int = 200):
    """Run a text-generation pipeline on *prompt* and return the text.

    Args:
        pipe: a Hugging Face text-generation pipeline (any callable with the
            same signature).
        prompt: user-supplied prompt text.
        max_new_tokens: generation budget; default 200 preserves the original
            hard-coded value for existing callers.

    Returns:
        The generated text, or a "⚠️ Model error: ..." string on failure —
        callers rely on always receiving a string, never an exception.
    """
    try:
        output = pipe(prompt, max_new_tokens=max_new_tokens)
        # Transformers text-generation returns a list of dicts with a
        # "generated_text" key; fall back to str() for anything unexpected
        # so the endpoints never raise.
        if isinstance(output, list) and len(output) > 0:
            return output[0].get("generated_text", str(output))
        return str(output)
    except Exception as e:
        # logger.exception keeps the traceback (logger.error dropped it).
        logger.exception("Pipeline error: %s", e)
        return f"⚠️ Model error: {str(e)}"
def run_crop_doctor(image_bytes: bytes, symptoms: str) -> str:
    """Diagnose crop issues using Meta's multimodal LLaMA Vision model.

    Args:
        image_bytes: raw uploaded image bytes (any PIL-readable format).
        symptoms: the farmer's free-text symptom description.

    Returns:
        Diagnosis text, or a "⚠️ ..." message string on any failure — this
        function never raises, so the endpoint always returns 200.
    """
    if not crop_pipe:
        return "⚠️ Crop analysis temporarily unavailable (model not loaded)."
    try:
        # Normalize to RGB so grayscale/palette/alpha uploads all work.
        image = Image.open(io.BytesIO(image_bytes)).convert("RGB")
        prompt = (
            f"The farmer reports: {symptoms}. "
            "Analyze the image and diagnose the likely crop disease. "
            "Then explain it simply and recommend possible treatment steps."
        )
        # NOTE(review): image-text-to-text pipelines commonly expect a chat
        # "messages" structure; confirm this positional (image, prompt) call
        # works with the installed transformers version.
        output = crop_pipe(image, prompt)
        if isinstance(output, list) and len(output) > 0:
            return output[0].get("generated_text", str(output))
        return str(output)
    except Exception as e:
        # logger.exception keeps the traceback (logger.error dropped it).
        logger.exception("Crop Doctor pipeline error: %s", e)
        return f"⚠️ Unexpected model error: {str(e)}"
# ==============================
# API ROUTES
# ==============================
@app.post("/multilingual-chat")
async def multilingual_chat(req: ChatRequest, authorization: str | None = Header(None)):
    """Answer a farmer's free-text question via the shared chat model."""
    check_auth(authorization)
    return {"reply": run_conversational(chat_pipe, req.query)}
@app.post("/disaster-summarizer")
async def disaster_summarizer(req: DisasterRequest, authorization: str | None = Header(None)):
    """Summarize a disaster report with the shared text model."""
    check_auth(authorization)
    return {"summary": run_conversational(disaster_pipe, req.report)}
@app.post("/marketplace")
async def marketplace(req: MarketRequest, authorization: str | None = Header(None)):
    """Produce a marketplace recommendation for the given product text."""
    check_auth(authorization)
    return {"recommendation": run_conversational(market_pipe, req.product)}
@app.post("/vector-search")
async def vector_search(req: VectorRequest, authorization: str | None = Header(None)):
    """Semantic search over the project's vector store (smebuilder_vector)."""
    check_auth(authorization)
    try:
        results = query_vector(req.query)
        return {"results": results}
    except Exception as e:
        logger.error(f"Vector search error: {e}")
        # NOTE(review): failures return HTTP 200 with an "error" key rather
        # than a 5xx status — clients must inspect the body; consider raising
        # HTTPException instead, if clients can tolerate the change.
        return {"error": f"Vector search error: {str(e)}"}
@app.post("/crop-doctor")
async def crop_doctor(
    # NOTE(review): symptoms arrive as an HTTP *header*; Form(...) is the
    # usual choice alongside a file upload, but changing it would break
    # existing clients.
    symptoms: str = Header(...),
    image: UploadFile = File(...),
    authorization: str | None = Header(None)
):
    """Diagnose a crop problem from an uploaded image plus a symptoms header."""
    check_auth(authorization)
    image_bytes = await image.read()
    diagnosis = run_crop_doctor(image_bytes, symptoms)
    return {"diagnosis": diagnosis}