akhaliq HF Staff committed on
Commit
275a10f
·
verified ·
1 Parent(s): efee1d5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +39 -4
app.py CHANGED
@@ -1,8 +1,10 @@
1
  import gradio as gr
2
  import os
3
  import tempfile
 
4
  from typing import Optional, Tuple, Union
5
  from huggingface_hub import InferenceClient, whoami
 
6
 
7
  # Initialize Hugging Face Inference Client with fal-ai provider
8
  client = InferenceClient(
@@ -33,6 +35,21 @@ def verify_pro_status(token: Optional[Union[gr.OAuthToken, str]]) -> bool:
33
  print(f"Could not verify user's PRO/Enterprise status: {e}")
34
  return False
35
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
36
  def generate_video(
37
  prompt: str,
38
  duration: int = 8,
@@ -43,6 +60,9 @@ def generate_video(
43
  Generate video using Sora-2 through Hugging Face Inference API with fal-ai provider.
44
  Returns tuple of (video_path, status_message).
45
  """
 
 
 
46
  try:
47
  # Use provided API key or environment variable
48
  if api_key:
@@ -62,10 +82,15 @@ def generate_video(
62
  model="akhaliq/sora-2",
63
  )
64
 
65
- # Save to temporary file
66
- with tempfile.NamedTemporaryFile(suffix=".mp4", delete=False) as tmp_file:
67
- tmp_file.write(video_bytes)
68
- video_path = tmp_file.name
 
 
 
 
 
69
 
70
  status_message = f"✅ Video generated successfully!"
71
  return video_path, status_message
@@ -285,6 +310,15 @@ def create_ui():
285
 
286
  # Launch the application
287
  if __name__ == "__main__":
 
 
 
 
 
 
 
 
 
288
  app = create_ui()
289
  # Launch without special auth parameters and no queue
290
  # OAuth is enabled via Space metadata (hf_oauth: true in README.md)
@@ -292,4 +326,5 @@ if __name__ == "__main__":
292
  show_api=False,
293
  enable_monitoring=False,
294
  quiet=True,
 
295
  )
 
1
  import gradio as gr
2
  import os
3
  import tempfile
4
+ import shutil
5
  from typing import Optional, Tuple, Union
6
  from huggingface_hub import InferenceClient, whoami
7
+ from pathlib import Path
8
 
9
  # Initialize Hugging Face Inference Client with fal-ai provider
10
  client = InferenceClient(
 
35
  print(f"Could not verify user's PRO/Enterprise status: {e}")
36
  return False
37
 
38
def cleanup_temp_files():
    """Remove stale temporary .mp4 files to keep the temp dir from filling up.

    Best-effort: deletes any ``*.mp4`` in the system temp directory whose
    modification time is more than five minutes in the past. Failures on
    individual files are ignored; an unexpected top-level failure is printed
    rather than raised, so cleanup never blocks video generation.
    """
    import time  # local import keeps the fix self-contained within this block

    try:
        temp_dir = tempfile.gettempdir()
        # Hoist the cutoff out of the loop: one clock read, clearer intent.
        cutoff = time.time() - 300  # files older than 5 minutes are stale
        for file_path in Path(temp_dir).glob("*.mp4"):
            try:
                # BUG FIX: original compared against os.time.time(), which
                # raises AttributeError (os has no `time` attribute). The
                # inner `except` swallowed it, so nothing was ever deleted.
                if file_path.stat().st_mtime < cutoff:
                    file_path.unlink(missing_ok=True)
            except Exception:
                pass  # best-effort: skip files we cannot stat or remove
    except Exception as e:
        print(f"Cleanup error: {e}")
52
+
53
  def generate_video(
54
  prompt: str,
55
  duration: int = 8,
 
60
  Generate video using Sora-2 through Hugging Face Inference API with fal-ai provider.
61
  Returns tuple of (video_path, status_message).
62
  """
63
+ # Clean up old files before generating new ones
64
+ cleanup_temp_files()
65
+
66
  try:
67
  # Use provided API key or environment variable
68
  if api_key:
 
82
  model="akhaliq/sora-2",
83
  )
84
 
85
+ # Save to temporary file with proper cleanup
86
+ # Use NamedTemporaryFile with delete=True but keep reference
87
+ temp_file = tempfile.NamedTemporaryFile(suffix=".mp4", delete=False)
88
+ try:
89
+ temp_file.write(video_bytes)
90
+ temp_file.flush()
91
+ video_path = temp_file.name
92
+ finally:
93
+ temp_file.close()
94
 
95
  status_message = f"✅ Video generated successfully!"
96
  return video_path, status_message
 
310
 
311
  # Launch the application
312
  if __name__ == "__main__":
313
+ # Clean up any leftover files on startup
314
+ try:
315
+ cleanup_temp_files()
316
+ # Also try to clear Gradio's cache
317
+ if os.path.exists("gradio_cached_examples"):
318
+ shutil.rmtree("gradio_cached_examples", ignore_errors=True)
319
+ except Exception as e:
320
+ print(f"Initial cleanup error: {e}")
321
+
322
  app = create_ui()
323
  # Launch without special auth parameters and no queue
324
  # OAuth is enabled via Space metadata (hf_oauth: true in README.md)
 
326
  show_api=False,
327
  enable_monitoring=False,
328
  quiet=True,
329
+ max_threads=10, # Limit threads to prevent resource exhaustion
330
  )