Alessio Grancini
committed on
Update app.py
app.py CHANGED

@@ -2,6 +2,7 @@ from ultralytics import YOLO
 import cv2
 import gradio as gr
 import numpy as np
+import spaces
 import os
 import torch
 import utils
@@ -13,21 +14,20 @@ from point_cloud_generator import display_pcd



-import spaces  # Required for ZeroGPU

-# Ensure CUDA is NOT initialized before ZeroGPU assigns a device
-torch.backends.cudnn.enabled = False  # Prevents CUDA errors on first GPU allocation


+device = torch.device("cpu")  # Start in CPU mode
+
 def initialize_gpu():
     """Ensure ZeroGPU assigns a GPU before initializing CUDA"""
     global device
     try:
-        with spaces.GPU():  # Ensures
+        with spaces.GPU():  # Ensures ZeroGPU assigns a GPU
+            torch.cuda.empty_cache()  # Prevent leftover memory issues
             if torch.cuda.is_available():
                 device = torch.device("cuda")
                 print(f"✅ GPU initialized: {torch.cuda.get_device_name(0)}")
-                torch.cuda.empty_cache()  # Clear memory
             else:
                 print("❌ No GPU detected after ZeroGPU allocation.")
                 device = torch.device("cpu")
@@ -35,8 +35,6 @@ def initialize_gpu():
         print(f"🚨 GPU initialization failed: {e}")
         device = torch.device("cpu")

-# ✅ Don't call CUDA here!
-device = torch.device("cpu")  # Default to CPU

 # Run GPU initialization before using CUDA
 initialize_gpu()
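For reference, the ZeroGPU pattern documented for the `spaces` package puts `@spaces.GPU` on the function that actually needs CUDA, so the Space boots in CPU mode and a GPU is attached only for the duration of the decorated call. A minimal sketch of that pattern follows; the `run_inference` function, the `yolov8n.pt` checkpoint, and the model handling are illustrative placeholders, not part of this commit.

# Sketch only: decorator-based ZeroGPU usage (assumed from the `spaces` package docs,
# not taken from this commit). Everything at module level runs on CPU.
import spaces
import torch
from ultralytics import YOLO

model = YOLO("yolov8n.pt")  # illustrative checkpoint; loaded on CPU at startup

@spaces.GPU  # ZeroGPU attaches a GPU only while this call is running
def run_inference(image):
    # Inside the decorated call CUDA is available; fall back to CPU elsewhere.
    device = "cuda" if torch.cuda.is_available() else "cpu"
    return model.to(device)(image)

With this pattern the module-level `device = torch.device("cpu")` default from the diff stays valid until a decorated call actually runs on an allocated GPU.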