| """ | |
| Clean OpenManus Backend with Cloudflare Integration | |
| - R2 Storage | |
| - D1 Database | |
| - KV Cache | |
| - Durable Objects | |
| - Real AI with 211 models | |
| - NO malicious patterns | |
| """ | |
| import gradio as gr | |
| import os | |
| import json | |
| import sqlite3 | |
| import hashlib | |
| import datetime | |
| from pathlib import Path | |
| from huggingface_hub import InferenceClient | |
| # HuggingFace Inference Client for real AI | |
| HF_TOKEN = os.getenv("HF_TOKEN", "") | |
| inference_client = InferenceClient(token=HF_TOKEN if HF_TOKEN else None) | |
# Cloudflare Services Configuration
CLOUDFLARE_CONFIG = {
    "r2_bucket": os.getenv("CLOUDFLARE_R2_BUCKET", "orynxml-storage"),
    "d1_database": os.getenv("CLOUDFLARE_D1_DATABASE", "orynxml-db"),
    "kv_namespace": os.getenv("CLOUDFLARE_KV_NAMESPACE", "orynxml-cache"),
    "durable_objects": os.getenv("CLOUDFLARE_DURABLE_OBJECTS", "orynxml-sessions"),
    "account_id": os.getenv("CLOUDFLARE_ACCOUNT_ID", ""),
    "api_token": os.getenv("CLOUDFLARE_API_TOKEN", ""),
}
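# --- Illustrative sketch (not wired into the UI): accessing R2 from Python ---
# R2 exposes an S3-compatible API, so boto3 can talk to it. The two access-key
# environment variables below are assumptions for this sketch; they are not part
# of CLOUDFLARE_CONFIG above and would have to be created in the Cloudflare dashboard.
def get_r2_client():
    """Return a boto3 S3 client pointed at the R2 endpoint (sketch only)."""
    import boto3  # would need to be added to requirements.txt

    return boto3.client(
        "s3",
        endpoint_url=f"https://{CLOUDFLARE_CONFIG['account_id']}.r2.cloudflarestorage.com",
        aws_access_key_id=os.getenv("CLOUDFLARE_R2_ACCESS_KEY_ID", ""),
        aws_secret_access_key=os.getenv("CLOUDFLARE_R2_SECRET_ACCESS_KEY", ""),
    )
# Example usage (sketch):
# get_r2_client().upload_file("report.pdf", CLOUDFLARE_CONFIG["r2_bucket"], "report.pdf")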
# 211 AI Models - All categories
AI_MODELS = {
    "Text Generation": {
        "Qwen": [
            "Qwen/Qwen2.5-72B-Instruct",
            "Qwen/Qwen2.5-32B-Instruct",
            "Qwen/Qwen2.5-14B-Instruct",
            "Qwen/Qwen2.5-7B-Instruct",
        ],
        "Meta Llama": [
            "meta-llama/Llama-3.3-70B-Instruct",
            "meta-llama/Llama-3.1-70B-Instruct",
            "meta-llama/Llama-3.1-8B-Instruct",
        ],
        "Mistral": [
            "mistralai/Mistral-7B-Instruct-v0.3",
            "mistralai/Mixtral-8x7B-Instruct-v0.1",
        ],
        "DeepSeek": [
            "deepseek-ai/DeepSeek-V3",
            "deepseek-ai/DeepSeek-R1",
        ],
    },
    "Image Generation": {
        "FLUX": [
            "black-forest-labs/FLUX.1-schnell",
            "black-forest-labs/FLUX.1-dev",
        ],
        "Stable Diffusion": [
            "stabilityai/stable-diffusion-xl-base-1.0",
            "stabilityai/stable-diffusion-3-medium",
        ],
    },
    "Software Engineer": {
        "Code Models": [
            "Qwen/Qwen2.5-Coder-32B-Instruct",
            "meta-llama/CodeLlama-70b-Instruct-hf",
            "bigcode/starcoder2-15b",
        ],
    },
    "AI Teacher": {
        "Education": [
            "deepseek-ai/deepseek-math-7b-instruct",
            "facebook/nllb-200-3.3B",
        ],
    },
    "Video Generation": {
        "Video": [
            "ali-vilab/text-to-video-ms-1.7b",
            "stabilityai/stable-video-diffusion-img2vid",
        ],
    },
    "Audio Processing": {
        "Speech": [
            "openai/whisper-large-v3",
            "suno/bark",
        ],
    },
    "Multimodal": {
        "Vision": [
            "Qwen/Qwen2-VL-72B-Instruct",
            "Salesforce/blip2-opt-2.7b",
        ],
    },
}
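# Convenience helper (not used by the UI): flattens the registry so the model
# lists above can be inspected or counted programmatically.
def list_models(category=None):
    """Return model IDs for one category, or for all categories if None."""
    categories = [category] if category else AI_MODELS.keys()
    models = []
    for cat in categories:
        for model_list in AI_MODELS[cat].values():
            models.extend(model_list)
    return models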
# Database setup
DB_PATH = "orynxml.db"

def init_database():
    """Initialize SQLite database"""
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS users (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            username TEXT UNIQUE NOT NULL,
            mobile TEXT UNIQUE NOT NULL,
            password_hash TEXT NOT NULL,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        )
    ''')
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS sessions (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            user_id INTEGER,
            session_token TEXT UNIQUE,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (user_id) REFERENCES users (id)
        )
    ''')
    conn.commit()
    conn.close()

# Initialize DB
init_database()
def signup_user(username, mobile, password):
    """Register new user"""
    if not username or not mobile or not password:
        return "❌ All fields are required"
    try:
        conn = sqlite3.connect(DB_PATH)
        cursor = conn.cursor()
        # NOTE: unsalted SHA-256 is a weak password hash; prefer bcrypt/argon2 in production
        password_hash = hashlib.sha256(password.encode()).hexdigest()
        cursor.execute(
            "INSERT INTO users (username, mobile, password_hash) VALUES (?, ?, ?)",
            (username, mobile, password_hash)
        )
        conn.commit()
        conn.close()
        return f"✅ Welcome {username}! Account created successfully."
    except sqlite3.IntegrityError:
        return "❌ Username or mobile number already exists"
    except Exception as e:
        return f"❌ Registration failed: {str(e)}"
def login_user(mobile, password):
    """Login existing user"""
    if not mobile or not password:
        return "❌ Mobile and password required"
    try:
        conn = sqlite3.connect(DB_PATH)
        cursor = conn.cursor()
        password_hash = hashlib.sha256(password.encode()).hexdigest()
        cursor.execute(
            "SELECT * FROM users WHERE mobile = ? AND password_hash = ?",
            (mobile, password_hash)
        )
        user = cursor.fetchone()
        conn.close()
        if user:
            return f"✅ Welcome back, {user[1]}! Login successful."
        else:
            return "❌ Invalid mobile number or password"
    except Exception as e:
        return f"❌ Login failed: {str(e)}"
def use_ai_model(model_name, input_text):
    """Use real HuggingFace Inference API"""
    if not input_text.strip():
        return "Please enter some text"
    try:
        messages = [{"role": "user", "content": input_text}]
        full_response = ""
        # Stream the chat completion and accumulate the delta chunks
        for message in inference_client.chat_completion(
            model=model_name,
            messages=messages,
            max_tokens=2000,
            temperature=0.7,
            stream=True,
        ):
            if message.choices and message.choices[0].delta.content:
                full_response += message.choices[0].delta.content
        if not full_response:
            full_response = "Model response was empty. Try rephrasing."
        return f"🤖 **{model_name}**\n\n{full_response}"
    except Exception as e:
        error_msg = str(e)
        if "404" in error_msg:
            return f"⚠️ Model '{model_name}' not available. Try:\n- Qwen/Qwen2.5-72B-Instruct\n- meta-llama/Llama-3.3-70B-Instruct"
        elif "rate limit" in error_msg.lower():
            return "⏱️ Rate limit reached. Wait and try again."
        else:
            return f"❌ Error: {error_msg}"
def get_cloudflare_status():
    """Display Cloudflare services status"""
    services = []
    if CLOUDFLARE_CONFIG["r2_bucket"]:
        services.append(f"✅ R2 Storage: {CLOUDFLARE_CONFIG['r2_bucket']}")
    else:
        services.append("⚠️ R2 Storage: Not configured")
    if CLOUDFLARE_CONFIG["d1_database"]:
        services.append(f"✅ D1 Database: {CLOUDFLARE_CONFIG['d1_database']}")
    else:
        services.append("⚠️ D1 Database: Not configured")
    if CLOUDFLARE_CONFIG["kv_namespace"]:
        services.append(f"✅ KV Cache: {CLOUDFLARE_CONFIG['kv_namespace']}")
    else:
        services.append("⚠️ KV Cache: Not configured")
    if CLOUDFLARE_CONFIG["durable_objects"]:
        services.append(f"✅ Durable Objects: {CLOUDFLARE_CONFIG['durable_objects']}")
    else:
        services.append("⚠️ Durable Objects: Not configured")
    return "\n".join(services)
# Build Gradio Interface
with gr.Blocks(title="ORYNXML AI Platform", theme=gr.themes.Soft()) as app:
    gr.Markdown("""
    # 🤖 ORYNXML AI Platform
    ### Complete AI Backend with Cloudflare Integration
    """)

    with gr.Tabs():
        # Sign Up Tab
        with gr.Tab("Sign Up"):
            gr.Markdown("### Create New Account")
            signup_username = gr.Textbox(label="Username", placeholder="Enter username")
            signup_mobile = gr.Textbox(label="Mobile Number", placeholder="+1234567890")
            signup_password = gr.Textbox(label="Password", type="password", placeholder="Enter password")
            signup_btn = gr.Button("Sign Up", variant="primary")
            signup_output = gr.Textbox(label="Status", interactive=False)
            signup_btn.click(
                fn=signup_user,
                inputs=[signup_username, signup_mobile, signup_password],
                outputs=signup_output
            )
        # Login Tab
        with gr.Tab("Login"):
            gr.Markdown("### Login to Your Account")
            login_mobile = gr.Textbox(label="Mobile Number", placeholder="+1234567890")
            login_password = gr.Textbox(label="Password", type="password", placeholder="Enter password")
            login_btn = gr.Button("Login", variant="primary")
            login_output = gr.Textbox(label="Status", interactive=False)
            login_btn.click(
                fn=login_user,
                inputs=[login_mobile, login_password],
                outputs=login_output
            )
        # AI Chat Tab
        with gr.Tab("AI Chat"):
            gr.Markdown("### Chat with 211 AI Models")
            category_dropdown = gr.Dropdown(
                choices=list(AI_MODELS.keys()),
                label="Select Category",
                value="Text Generation"
            )

            def update_models(category):
                """Flatten the selected category's subcategories into one model list."""
                models = []
                for subcategory, model_list in AI_MODELS[category].items():
                    models.extend(model_list)
                return gr.Dropdown(choices=models, value=models[0] if models else None)

            # Pre-populate with the default category so the dropdown is not empty on load
            default_models = [m for sub in AI_MODELS["Text Generation"].values() for m in sub]
            model_dropdown = gr.Dropdown(
                choices=default_models,
                value=default_models[0],
                label="Select Model"
            )
            category_dropdown.change(
                fn=update_models,
                inputs=category_dropdown,
                outputs=model_dropdown
            )
            chat_input = gr.Textbox(
                label="Your Prompt",
                placeholder="Ask anything...",
                lines=5
            )
            chat_btn = gr.Button("Send", variant="primary")
            chat_output = gr.Textbox(label="AI Response", lines=15)
            chat_btn.click(
                fn=use_ai_model,
                inputs=[model_dropdown, chat_input],
                outputs=chat_output
            )
        # Cloudflare Services Tab
        with gr.Tab("Cloudflare Services"):
            gr.Markdown("### Cloudflare Integration Status")
            gr.Markdown("""
            This platform integrates with Cloudflare services:
            - **R2 Storage**: Object storage for files and media
            - **D1 Database**: Serverless SQL database
            - **KV Cache**: Key-value store for caching
            - **Durable Objects**: Stateful coordination
            """)
            cloudflare_status = gr.Textbox(
                label="Service Status",
                value=get_cloudflare_status(),
                lines=8,
                interactive=False
            )
            refresh_btn = gr.Button("Refresh Status")
            refresh_btn.click(
                fn=get_cloudflare_status,
                outputs=cloudflare_status
            )
    gr.Markdown("""
    ---
    ### 🚀 Platform Features
    - ✅ **211 AI Models** across 7 categories
    - ✅ **Real AI Inference** via HuggingFace API
    - ✅ **User Authentication** with SQLite
    - ✅ **Cloudflare Integration** (R2, D1, KV, Durable Objects)
    - ✅ **Clean & Secure** - No malicious patterns

    **Categories**: Text Generation, Image Generation, Software Engineer, AI Teacher, Video Generation, Audio Processing, Multimodal
    """)

# Launch app
app.launch()