Update document_generator.py
document_generator.py  (+28 -1)
@@ -44,6 +44,7 @@ import os
 import json
 import re
 import asyncio
+import time
 from typing import List, Dict, Optional, Any, Callable
 from openai import OpenAI
 import logging
@@ -52,6 +53,9 @@ from fastapi import APIRouter, HTTPException, Request
 from fastapi.responses import StreamingResponse
 from pydantic import BaseModel
 from fastapi_cache.decorator import cache
+import psycopg2
+from datetime import datetime
+
 
 logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
 logger = logging.getLogger(__name__)
@@ -69,6 +73,28 @@ def log_execution(func: Callable) -> Callable:
             raise
     return wrapper
 
+class DatabaseManager:
+    """Manages database operations."""
+
+    def __init__(self):
+        self.db_params = {
+            "dbname": "postgres",
+            "user": os.environ['SUPABASE_USER'],
+            "password": os.environ['SUPABASE_PASSWORD'],
+            "host": "aws-0-us-west-1.pooler.supabase.com",
+            "port": "5432"
+        }
+
+    @log_execution
+    def update_database(self, user_id: str, user_query: str, response: str) -> None:
+        with psycopg2.connect(**self.db_params) as conn:
+            with conn.cursor() as cur:
+                insert_query = """
+                INSERT INTO ai_document_generator (user_id, user_query, response)
+                VALUES (%s, %s, %s);
+                """
+                cur.execute(insert_query, (user_id, user_query, response))
+
 class AIClient:
     def __init__(self):
         self.client = OpenAI(
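Note: the INSERT in update_database presupposes an ai_document_generator table with user_id, user_query, and response columns; the commit itself does not create it. Below is a minimal sketch of one schema that would satisfy that statement, using the same connection parameters DatabaseManager reads from the environment. The column types, the surrogate key, and the created_at column are assumptions, not part of this diff.

# Sketch only: a plausible DDL for the table targeted by update_database above.
import os
import psycopg2

CREATE_TABLE_SQL = """
CREATE TABLE IF NOT EXISTS ai_document_generator (
    id         BIGSERIAL PRIMARY KEY,              -- assumed surrogate key
    user_id    TEXT NOT NULL,
    user_query TEXT NOT NULL,
    response   TEXT NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now()  -- assumed audit column
);
"""

if __name__ == "__main__":
    # Same connection parameters DatabaseManager uses, read from the environment.
    with psycopg2.connect(
        dbname="postgres",
        user=os.environ["SUPABASE_USER"],
        password=os.environ["SUPABASE_PASSWORD"],
        host="aws-0-us-west-1.pooler.supabase.com",
        port="5432",
    ) as conn:
        with conn.cursor() as cur:
            cur.execute(CREATE_TABLE_SQL)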
@@ -224,7 +250,7 @@ class MarkdownDocumentRequest(BaseModel):
 
 async def generate_document_stream(document_generator: DocumentGenerator, document_outline: Dict, query: str):
     document_generator.document_outline = document_outline
-
+    db_manager = DatabaseManager()
     overall_objective = query
     document_layout = json.dumps(document_generator.document_outline, indent=2)
 
@@ -276,6 +302,7 @@ async def generate_document_stream(document_generator: DocumentGenerator, docume
             "content": markdown_document,
             "json": document_generator.document_outline
         }, ensure_ascii=False) + "\n"
+        db_manager.update_database("elevatics", query, markdown_document)
 
 @cache(expire=600*24*7)
 @router.post("/generate-document/json", response_model=JsonDocumentResponse)
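Taken together, the changes make every streamed document generation also write a row to Postgres under the fixed "elevatics" user id, via the DatabaseManager introduced above. A minimal sketch of exercising the new persistence path directly, outside the FastAPI route, assuming the module imports as document_generator and the Supabase credentials are set in the environment:

# Sketch: drive DatabaseManager directly, mirroring the call made at the end of
# generate_document_stream ("elevatics", query, markdown_document).
from document_generator import DatabaseManager  # import path assumed from this file's name

db = DatabaseManager()
db.update_database(
    user_id="elevatics",                                      # same fixed id the stream handler uses
    user_query="Example query",                               # placeholder
    response="# Example document\n\nGenerated markdown...",   # placeholder
)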
|