karthikeya1212 committed
Commit a02846d · verified · 1 Parent(s): f0458ed

Update api/server.py

Files changed (1)
  1. api/server.py +207 -61
api/server.py CHANGED
@@ -87,15 +87,112 @@
  # @app.get("/")
  # async def health():
  # return {"status": "running"}
- import logging
- from fastapi import FastAPI, HTTPException
- from fastapi.middleware.cors import CORSMiddleware
- from pydantic import BaseModel
- from services import queue_manager
  import os
  from pathlib import Path
- from typing import Optional
-

  # CACHE PATCH BLOCK: place FIRST in pipeline.py!
  HF_CACHE_DIR = Path("/tmp/hf_cache")
@@ -126,61 +223,110 @@ def safe_expanduser(path):
      return os.path.expanduser_original(path)
  os.path.expanduser = safe_expanduser


- logging.basicConfig(level=logging.INFO, format="%(asctime)s [%(levelname)s] %(message)s")

- app = FastAPI(title="AI ADD Generator", version="1.0")

- app.add_middleware(
-     CORSMiddleware,
-     allow_origins=["*"],
-     allow_credentials=True,
-     allow_methods=["*"],
-     allow_headers=["*"],
- )

- # ---------------------------
- # Pydantic models
- # ---------------------------
- class IdeaRequest(BaseModel):
-     idea: str
-
- class ConfirmationRequest(BaseModel):
-     task_id: str
-     confirm: bool
-     edited_script: Optional[str] = None
-
- # ---------------------------
- # API endpoints
- # ---------------------------
- @app.post("/submit_idea")
- async def submit_idea(request: IdeaRequest):
-     task_id = await queue_manager.add_task(request.idea)
-     return {"status": "submitted", "task_id": task_id}
-
- @app.post("/confirm")
- async def confirm_task(request: ConfirmationRequest):
-     task = queue_manager.get_task_status(request.task_id)
-     if not task:
-         raise HTTPException(status_code=404, detail="Task not found")
-     # status values are stored as strings by queue_manager/pipeline
-     if task["status"] != queue_manager.TaskStatus.WAITING_CONFIRMATION.value:
-         raise HTTPException(status_code=400, detail="Task not waiting for confirmation")
-
-     # if frontend supplied an edited script, persist it before unblocking the pipeline
-     if request.edited_script:
-         task["result"]["script"] = request.edited_script
-
-     await queue_manager.confirm_task(request.task_id)
-     return {"status": "confirmed", "task": task}
-
- @app.get("/status/{task_id}")
- async def status(task_id: str):
-     task = queue_manager.get_task_status(task_id)
-     if not task:
-         raise HTTPException(status_code=404, detail="Task not found")
-     return task
-
- @app.get("/")
- async def health():
-     return {"status": "running"}
 
  # @app.get("/")
  # async def health():
  # return {"status": "running"}
+
+
+ # import logging
+ # from fastapi import FastAPI, HTTPException
+ # from fastapi.middleware.cors import CORSMiddleware
+ # from pydantic import BaseModel
+ # from services import queue_manager
+ # import os
+ # from pathlib import Path
+ # from typing import Optional
+
+
+ # # CACHE PATCH BLOCK: place FIRST in pipeline.py!
+ # HF_CACHE_DIR = Path("/tmp/hf_cache")
+ # HF_CACHE_DIR.mkdir(parents=True, exist_ok=True)
+ # os.environ.update({
+ # "HF_HOME": str(HF_CACHE_DIR),
+ # "HF_HUB_CACHE": str(HF_CACHE_DIR),
+ # "DIFFUSERS_CACHE": str(HF_CACHE_DIR),
+ # "TRANSFORMERS_CACHE": str(HF_CACHE_DIR),
+ # "XDG_CACHE_HOME": str(HF_CACHE_DIR),
+ # "HF_DATASETS_CACHE": str(HF_CACHE_DIR),
+ # "HF_MODULES_CACHE": str(HF_CACHE_DIR),
+ # "TMPDIR": str(HF_CACHE_DIR),
+ # "CACHE_DIR": str(HF_CACHE_DIR),
+ # "TORCH_HOME": str(HF_CACHE_DIR),
+ # "HOME": str(HF_CACHE_DIR)
+ # })
+ # import os.path
+ # if not hasattr(os.path, "expanduser_original"):
+ # os.path.expanduser_original = os.path.expanduser
+ # def safe_expanduser(path):
+ # if (
+ # path.startswith("~") or
+ # path.startswith("/.cache") or
+ # path.startswith("/root/.cache")
+ # ):
+ # return str(HF_CACHE_DIR)
+ # return os.path.expanduser_original(path)
+ # os.path.expanduser = safe_expanduser
+
+
+ # logging.basicConfig(level=logging.INFO, format="%(asctime)s [%(levelname)s] %(message)s")
+
+ # app = FastAPI(title="AI ADD Generator", version="1.0")
+
+ # app.add_middleware(
+ # CORSMiddleware,
+ # allow_origins=["*"],
+ # allow_credentials=True,
+ # allow_methods=["*"],
+ # allow_headers=["*"],
+ # )
+
+ # # ---------------------------
+ # # Pydantic models
+ # # ---------------------------
+ # class IdeaRequest(BaseModel):
+ # idea: str
+
+ # class ConfirmationRequest(BaseModel):
+ # task_id: str
+ # confirm: bool
+ # edited_script: Optional[str] = None
+
+ # # ---------------------------
+ # # API endpoints
+ # # ---------------------------
+ # @app.post("/submit_idea")
+ # async def submit_idea(request: IdeaRequest):
+ # task_id = await queue_manager.add_task(request.idea)
+ # return {"status": "submitted", "task_id": task_id}
+
+ # @app.post("/confirm")
+ # async def confirm_task(request: ConfirmationRequest):
+ # task = queue_manager.get_task_status(request.task_id)
+ # if not task:
+ # raise HTTPException(status_code=404, detail="Task not found")
+ # # status values are stored as strings by queue_manager/pipeline
+ # if task["status"] != queue_manager.TaskStatus.WAITING_CONFIRMATION.value:
+ # raise HTTPException(status_code=400, detail="Task not waiting for confirmation")
+
+ # # if frontend supplied an edited script, persist it before unblocking the pipeline
+ # if request.edited_script:
+ # task["result"]["script"] = request.edited_script
+
+ # await queue_manager.confirm_task(request.task_id)
+ # return {"status": "confirmed", "task": task}
+
+ # @app.get("/status/{task_id}")
+ # async def status(task_id: str):
+ # task = queue_manager.get_task_status(task_id)
+ # if not task:
+ # raise HTTPException(status_code=404, detail="Task not found")
+ # return task
+
+ # @app.get("/")
+ # async def health():
+ # return {"status": "running"}
+
+
+
+
+
  import os
  from pathlib import Path

  # CACHE PATCH BLOCK: place FIRST in pipeline.py!
  HF_CACHE_DIR = Path("/tmp/hf_cache")

      return os.path.expanduser_original(path)
  os.path.expanduser = safe_expanduser

+ import asyncio
+ import logging
+ import core.script_gen as script_gen
+ import core.story_script as story_script
+ # IMAGE: enabled by default (uncommented) — pipeline will run image generation and return images
+ import core.image_generator as image_gen
+ # VIDEO / MUSIC / ASSEMBLER placeholders: uncomment to enable those stages
+ # import core.video_gen as video_gen
+ # import core.music_gen as music_gen
+ # import core.assemble as assemble
+
+ logging.basicConfig(
+     level=logging.INFO,
+     format="%(asctime)s [%(levelname)s] %(message)s"
+ )
+
+ async def run_pipeline(task: dict, confirmation_event: asyncio.Event):
+     """
+     Executes the workflow and updates task['result'] after each stage so frontend
+     can poll /status/{task_id} to get intermediate outputs.
+
+     Behavior:
+     - generate_script -> write task.result['script'] and set status to waiting_for_confirmation
+       Frontend can edit and confirm the script.
+     - on confirm -> generate story, write task.result['story_script']
+     - generate images (image_gen) -> write task.result['images']
+     - optional stages (video_gen, music_gen, assemble) are executed only if their modules are imported.
+     Each stage updates task['result'] and task['status'] so frontend can receive intermediate outputs.
+     """
+     task_id = task["id"]
+     idea = task["idea"]
+
+     logging.info(f"[Pipeline] Starting script generation for task {task_id}")
+     # 1) Script generation — update result so frontend can read it immediately
+     script = await script_gen.generate_script(idea)
+     task["result"]["script"] = script
+     task["status"] = "waiting_for_confirmation"
+     task["confirmation_required"] = True
+     logging.info(f"[Pipeline] Script ready for task {task_id}; waiting for confirmation/edit...")
+
+     # Wait for frontend confirmation (may include edited script saved into task["result"]["script"])
+     await confirmation_event.wait()
+
+     # confirmed, proceed
+     task["status"] = "confirmed"
+     task["confirmation_required"] = False
+     logging.info(f"[Pipeline] Task {task_id} confirmed. Generating story based on confirmed script...")
+
+     # 2) Story generation — use possibly edited/confirmed script
+     confirmed_script = task["result"].get("script", script)
+     story = await story_script.generate_story(confirmed_script)
+     task["result"]["story_script"] = story
+     task["status"] = "story_generated"
+     logging.info(f"[Pipeline] Story ready for task {task_id}")
+
+     # 3) Image generation (enabled)
+     try:
+         logging.info(f"[Pipeline] Generating images for task {task_id}")
+         images = await image_gen.generate_images(story)
+         task["result"]["images"] = images
+         task["status"] = "images_generated"
+         logging.info(f"[Pipeline] Images ready for task {task_id}")
+     except Exception as e:
+         # keep pipeline going; store error for this stage
+         task.setdefault("stage_errors", {})["images"] = str(e)
+         logging.exception(f"[Pipeline] Image generation failed for task {task_id}: {e}")
+
+     # 4) Optional: video generation (uncomment import to enable)
+     # if "video_gen" in globals():
+     #     try:
+     #         logging.info(f"[Pipeline] Generating video for task {task_id}")
+     #         video = await video_gen.generate_video(task["result"].get("images"))
+     #         task["result"]["video"] = video
+     #         task["status"] = "video_generated"
+     #         logging.info(f"[Pipeline] Video ready for task {task_id}")
+     #     except Exception as e:
+     #         task.setdefault("stage_errors", {})["video"] = str(e)
+     #         logging.exception(f"[Pipeline] Video generation failed for task {task_id}: {e}")
+
+     # 5) Optional: music generation (uncomment import to enable)
+     # if "music_gen" in globals():
+     #     try:
+     #         logging.info(f"[Pipeline] Generating music for task {task_id}")
+     #         music = await music_gen.generate_music(story)
+     #         task["result"]["music"] = music
+     #         task["status"] = "music_generated"
+     #         logging.info(f"[Pipeline] Music ready for task {task_id}")
+     #     except Exception as e:
+     #         task.setdefault("stage_errors", {})["music"] = str(e)
+     #         logging.exception(f"[Pipeline] Music generation failed for task {task_id}: {e}")
+
+     # 6) Optional: assembler (uncomment import to enable)
+     # if "assemble" in globals():
+     #     try:
+     #         logging.info(f"[Pipeline] Assembling final output for task {task_id}")
+     #         final_output = await assemble.create_final(task["result"])
+     #         task["result"]["final_output"] = final_output
+     #         task["status"] = "assembled"
+     #         logging.info(f"[Pipeline] Assembly complete for task {task_id}")
+     #     except Exception as e:
+     #         task.setdefault("stage_errors", {})["assemble"] = str(e)
+     #         logging.exception(f"[Pipeline] Assembly failed for task {task_id}: {e}")
+
+     # Finalize
+     task["status"] = "completed"
+     logging.info(f"[Pipeline] Task {task_id} completed.")
+     return task["result"]
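
The cache patch block kept at the top of the file redirects every Hugging Face / Torch cache location into /tmp/hf_cache and monkey-patches os.path.expanduser so that stray lookups under ~ or /root/.cache cannot land in a read-only home directory. A minimal, self-contained sketch of that pattern (the demo directory name and printed paths are illustrative, not taken from this repo):

import os
from pathlib import Path

# Illustrative cache root; the commit uses /tmp/hf_cache on the Space.
CACHE_DIR = Path("/tmp/hf_cache_demo")
CACHE_DIR.mkdir(parents=True, exist_ok=True)

# Point the usual cache-related variables at the writable directory.
os.environ.update({
    "HF_HOME": str(CACHE_DIR),
    "XDG_CACHE_HOME": str(CACHE_DIR),
    "HOME": str(CACHE_DIR),
})

# Keep the original expanduser and wrap it, the same pattern as in the diff.
if not hasattr(os.path, "expanduser_original"):
    os.path.expanduser_original = os.path.expanduser

def safe_expanduser(path):
    # Anything that would resolve under a home/cache directory is redirected.
    if path.startswith("~") or path.startswith("/.cache") or path.startswith("/root/.cache"):
        return str(CACHE_DIR)
    return os.path.expanduser_original(path)

os.path.expanduser = safe_expanduser

# After the patch, both of these resolve inside the writable cache dir.
print(os.path.expanduser("~/.cache/huggingface"))   # -> /tmp/hf_cache_demo
print(os.path.expanduser("/root/.cache/torch"))     # -> /tmp/hf_cache_demo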
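
run_pipeline pauses on an asyncio.Event between the script stage and the story stage; setting that event is what the confirm step does. The services/queue_manager module is not part of this diff, so the driver below is only a hypothetical sketch of how a task dict and its confirmation event could be wired together, with stand-in coroutines in place of the real core.script_gen / core.story_script modules:

import asyncio

# Stand-ins for core.script_gen / core.story_script (hypothetical, for illustration only).
async def generate_script(idea: str) -> str:
    await asyncio.sleep(0.1)
    return f"SCRIPT for: {idea}"

async def generate_story(script: str) -> str:
    await asyncio.sleep(0.1)
    return f"STORY based on: {script}"

# Simplified stand-in for the run_pipeline in this commit (script + story stages only).
async def run_pipeline(task: dict, confirmation_event: asyncio.Event):
    # Stage 1: script, then pause until the frontend confirms (possibly with edits).
    task["result"]["script"] = await generate_script(task["idea"])
    task["status"] = "waiting_for_confirmation"
    await confirmation_event.wait()

    # Stage 2: continue with the (possibly edited) script.
    task["status"] = "confirmed"
    task["result"]["story_script"] = await generate_story(task["result"]["script"])
    task["status"] = "completed"
    return task["result"]

async def main():
    task = {"id": "demo-task", "idea": "a short film about rain", "status": "queued", "result": {}}
    confirmation_event = asyncio.Event()

    pipeline = asyncio.create_task(run_pipeline(task, confirmation_event))

    # Poll until the pipeline parks itself at the confirmation gate,
    # the same state /status/{task_id} would report to the frontend.
    while task["status"] != "waiting_for_confirmation":
        await asyncio.sleep(0.05)

    # What the confirm step does: optionally persist an edited script, then set the event.
    task["result"]["script"] = task["result"]["script"] + " (edited by user)"
    confirmation_event.set()

    result = await pipeline
    print(task["status"], result["story_script"])

asyncio.run(main())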
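
On the frontend side, assuming the /submit_idea, /status/{task_id} and /confirm routes shown (commented out) above are served by whichever FastAPI app now owns them, and that the status strings match what run_pipeline writes, a polling flow would look roughly like this (base URL, idea text and intervals are illustrative):

import time
import requests

BASE_URL = "http://localhost:8000"  # illustrative; depends on how the API is deployed

# 1) Submit an idea and get back a task_id.
task_id = requests.post(f"{BASE_URL}/submit_idea", json={"idea": "a short film about rain"}).json()["task_id"]

# 2) Poll the status endpoint until the pipeline asks for confirmation.
while True:
    task = requests.get(f"{BASE_URL}/status/{task_id}").json()
    if task["status"] == "waiting_for_confirmation":
        break
    time.sleep(2)

# 3) Confirm, optionally sending an edited script back to the pipeline.
edited = task["result"]["script"] + "\n\n(editor tweak)"
requests.post(f"{BASE_URL}/confirm", json={"task_id": task_id, "confirm": True, "edited_script": edited})

# 4) Keep polling for intermediate outputs (story_script, images, ...) until completed.
while True:
    task = requests.get(f"{BASE_URL}/status/{task_id}").json()
    if task["status"] == "completed":
        break
    time.sleep(5)

print(task["result"].keys())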