Spaces: Running on Zero

shuanholmes committed · Commit 8ffebce · Parent(s): 6b7562f

[FireFlow] Bug Quick Fix
app.py CHANGED

@@ -59,7 +59,7 @@ add_sampling_metadata = True

 @spaces.GPU(duration=120)
 @torch.inference_mode()
-def edit(init_image, source_prompt, target_prompt, editing_strategy, num_steps, inject_step, guidance
+def edit(init_image, source_prompt, target_prompt, editing_strategy, num_steps, inject_step, guidance):
     global ae, t5, clip, model, name, is_schnell, output_dir, add_sampling_metadata, offload
     device = "cuda" if torch.cuda.is_available() else "cpu"
     torch.cuda.empty_cache()
@@ -93,7 +93,7 @@ def edit(init_image, source_prompt, target_prompt, editing_strategy, num_steps,
         height=height,
         num_steps=num_steps,
         guidance=guidance,
-        seed=
+        seed=None,
     )
     if opts.seed is None:
         opts.seed = torch.Generator(device="cpu").seed()
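The first hunk fixes a SyntaxError: the previous def edit(...) line was missing its closing parenthesis and colon, so app.py could not be imported and the Space failed at startup. Below is a minimal sketch of the repaired pattern in isolation; only the decorators, the signature, the global statement, and the device/cache lines come from the diff, and the rest of the body is a placeholder.

import torch
import spaces  # ZeroGPU helper available inside Hugging Face Spaces

@spaces.GPU(duration=120)   # request a ZeroGPU slot for up to 120 s per call
@torch.inference_mode()     # run without autograd bookkeeping
def edit(init_image, source_prompt, target_prompt, editing_strategy,
         num_steps, inject_step, guidance):
    # In the full app these globals refer to the preloaded autoencoder,
    # text encoders, and flow model set up at import time.
    global ae, t5, clip, model, name, is_schnell, output_dir, add_sampling_metadata, offload
    device = "cuda" if torch.cuda.is_available() else "cpu"
    torch.cuda.empty_cache()  # no-op on machines where CUDA was never initialized
    ...  # editing pipeline elided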
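The second hunk completes the truncated seed= keyword argument. Passing seed=None defers seeding to the fallback two lines below, which draws a fresh random seed from a CPU generator, so every run still ends up with a concrete, reportable seed. Here is a self-contained sketch of that pattern, using a hypothetical SamplingOptions dataclass as a stand-in for whatever options object opts refers to in app.py; the field values are placeholders.

from dataclasses import dataclass
from typing import Optional

import torch

@dataclass
class SamplingOptions:
    # Hypothetical stand-in for the options object built in app.py.
    num_steps: int
    guidance: float
    seed: Optional[int] = None

# seed=None mirrors the fixed call site in the diff.
opts = SamplingOptions(num_steps=25, guidance=2.0, seed=None)

# Same fallback as in the diff: when no seed was supplied,
# draw a non-deterministic one from a CPU RNG.
if opts.seed is None:
    opts.seed = torch.Generator(device="cpu").seed()

torch.manual_seed(opts.seed)   # apply it so the run can be reproduced later
print(f"seed = {opts.seed}")

The fallback keeps the UI simple (no seed field is required) while still recording the seed that was actually used for the edit.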