Spaces: Running on Zero
Commit fcc9ef6 · Parent: 0f3b600 · add
app.py CHANGED

@@ -129,7 +129,7 @@ def load_models():
 
     return pipeline, model, infer_config
 
-@spaces.GPU(duration=
+@spaces.GPU(duration=20)
 def process_images(input_images, prompt, steps=75, guidance_scale=7.5, pipeline=None):
     """Process input images and run refinement"""
     device = pipeline.device
@@ -199,7 +199,7 @@ def process_images(input_images, prompt, steps=75, guidance_scale=7.5, pipeline=
 
     return output, input_image
 
-@spaces.GPU(duration=
+@spaces.GPU(duration=20)
 def create_mesh(refined_image, model, infer_config):
     """Generate mesh from refined image"""
     # Convert PIL image to tensor
@@ -479,7 +479,7 @@ def create_demo():
     )
 
     # Set up event handlers
-    @spaces.GPU(duration=
+    @spaces.GPU(duration=20)  # Reduced duration to 20 seconds
     def generate(prompt, guidance_scale, num_steps):
         try:
             torch.cuda.empty_cache()  # Clear GPU memory before starting
@@ -492,7 +492,7 @@ def create_demo():
             print(error_msg)
             return None, error_msg
 
-    @spaces.GPU(duration=
+    @spaces.GPU(duration=20)  # Reduced duration to 20 seconds
     def refine(input_image, prompt, steps, guidance_scale):
         try:
             torch.cuda.empty_cache()  # Clear GPU memory before starting