import os

# Fix caching permissions for Hugging Face.
# These must be set before the Hugging Face libraries are imported, otherwise
# they fall back to the default (possibly read-only) cache location.
os.environ["HF_HOME"] = "./cache"
os.environ["TRANSFORMERS_CACHE"] = "./cache"
os.environ["SENTENCE_TRANSFORMERS_HOME"] = "./cache"

import io
import json

import faiss
import numpy as np
import requests
from fastapi import FastAPI, File, Form, UploadFile
from fastapi.middleware.cors import CORSMiddleware
from PIL import Image
from sentence_transformers import SentenceTransformer

app = FastAPI()

# Enable CORS (so the Netlify frontend can call the API)
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # can be restricted to the Netlify domain later
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Load product metadata (same order as the vectors in the FAISS index)
with open("products.json", "r", encoding="utf-8") as f:
    products = json.load(f)
print(f"📦 Loaded {len(products)} products")

# Load FAISS index
index = faiss.read_index("products.index")

# Load CLIP model (shared embedding space for text and images)
print("🧠 Loading CLIP model...")
model = SentenceTransformer("sentence-transformers/clip-ViT-B-32", cache_folder="./cache")


@app.get("/")
def root():
    return {"message": "🚀 Visual Product Matcher API is running!"}


@app.post("/search_text")
def search_text(query: str = Form(...), top_k: int = 5, min_score: float = 0.0):
    """Search products using a text query."""
    query_emb = model.encode([query], convert_to_numpy=True, normalize_embeddings=True)
    sims, indices = index.search(query_emb, top_k)

    results = []
    for sim, idx in zip(sims[0], indices[0]):
        score = float(sim)  # inner product of normalized vectors == cosine similarity
        if score >= min_score:
            item = products[idx].copy()
            item["score"] = score
            results.append(item)

    return {"matches": results}


@app.post("/match")  # image search
async def search_image(
    file: UploadFile = File(None),
    image_url: str = Form(None),
    top_k: int = 5,
    min_score: float = 0.0,
):
    """Search products using an image query (file upload or image URL)."""
    if file:
        image_bytes = await file.read()
        image = Image.open(io.BytesIO(image_bytes)).convert("RGB")
    elif image_url:
        response = requests.get(image_url, timeout=10)
        image = Image.open(io.BytesIO(response.content)).convert("RGB")
    else:
        return {"error": "No image provided"}

    image_emb = model.encode([image], convert_to_numpy=True, normalize_embeddings=True)
    sims, indices = index.search(image_emb, top_k)

    results = []
    for sim, idx in zip(sims[0], indices[0]):
        score = float(sim)  # cosine similarity
        if score >= min_score:
            item = products[idx].copy()
            item["score"] = score
            results.append(item)

    return {"matches": results}
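

# For reference, a minimal sketch of how a compatible "products.index" could be
# built. This helper is hypothetical and not called by the API: it assumes each
# products.json entry has an "image" field holding a local file path (a guess —
# adapt the field name to the real catalog), and the deployed index may have
# been produced differently. The search scores above are true cosine
# similarities only if the index is an inner-product index over L2-normalized
# embeddings, as in this sketch.
def build_index(products_path: str = "products.json", index_path: str = "products.index") -> None:
    """Sketch of building an inner-product FAISS index from CLIP image embeddings."""
    with open(products_path, "r", encoding="utf-8") as f:
        catalog = json.load(f)
    images = [Image.open(p["image"]).convert("RGB") for p in catalog]
    # Normalized embeddings + IndexFlatIP => search scores are cosine similarities
    embs = model.encode(images, convert_to_numpy=True, normalize_embeddings=True)
    ip_index = faiss.IndexFlatIP(embs.shape[1])
    ip_index.add(embs)
    faiss.write_index(ip_index, index_path)


# Local development entry point (a sketch: host and port are assumptions, and
# the deployed service may instead be started by a platform-provided
# uvicorn/gunicorn command).
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8000)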