Update app.py
app.py CHANGED
@@ -28,16 +28,19 @@ pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config, times
 
 # Inference function.
 @spaces.GPU(enable_queue=True)
-def generate_image(prompt, option):
+def generate_image(prompt, option, progress=gr.Progress()):
     global step_loaded
     print(prompt, option)
     ckpt, step = opts[option]
+    progress(0, total=step)
     if step != step_loaded:
         print(f"Switching checkpoint from {step_loaded} to {step}")
         pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config, timestep_spacing="trailing", prediction_type="sample" if step == 1 else "epsilon")
         pipe.unet.load_state_dict(load_file(hf_hub_download(repo, ckpt), device=device))
         step_loaded = step
-    return pipe(prompt, num_inference_steps=step, guidance_scale=0).images[0]
+    def inference_callback(p, i, t, kwargs):
+        progress(i+1, total=step)
+    return pipe(prompt, num_inference_steps=step, guidance_scale=0, callback_on_step_end=inference_callback).images[0]
 
 with gr.Blocks(css="style.css") as demo:
     gr.HTML(
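
The commit threads a gr.Progress() argument into the inference function, reports an empty bar before the pipeline runs, and advances it once per denoising step through diffusers' callback_on_step_end hook. Below is a minimal, self-contained sketch of that pattern, not the Space's actual app.py: the model id, step count, and UI layout are placeholders rather than values from this repo (the Space resolves its checkpoint and step count from the opts mapping and repo variable defined earlier in app.py), and the callback here hands back the callback_kwargs dict, which diffusers expects from a step-end callback.

import gradio as gr
import torch
from diffusers import StableDiffusionXLPipeline

# Placeholder checkpoint and step count; assumptions for this sketch only.
pipe = StableDiffusionXLPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0",
    torch_dtype=torch.float16,
).to("cuda")
NUM_STEPS = 4

def generate(prompt, progress=gr.Progress()):
    # Show an empty progress bar before the first denoising step runs.
    progress(0, total=NUM_STEPS)

    # diffusers invokes this after every scheduler step; returning the
    # callback_kwargs dict unchanged leaves the pipeline's tensors untouched.
    def on_step_end(pipeline, step_index, timestep, callback_kwargs):
        progress(step_index + 1, total=NUM_STEPS)
        return callback_kwargs

    return pipe(
        prompt,
        num_inference_steps=NUM_STEPS,
        guidance_scale=0,
        callback_on_step_end=on_step_end,
    ).images[0]

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    result = gr.Image(label="Result")
    gr.Button("Generate").click(generate, inputs=prompt, outputs=result)

demo.queue().launch()

Because gr.Progress() appears as a keyword default, Gradio injects the tracker when the click event fires; progress updates are delivered through the queue, hence demo.queue().launch().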