import cv2
import gradio as gr
import numpy as np
import torch
from PIL import Image

from diffusers import ControlNetModel, StableDiffusionControlNetPipeline, UniPCMultistepScheduler
from diffusers.utils import load_image
# Constants
low_threshold = 100
high_threshold = 200

# Models
controlnet = ControlNetModel.from_pretrained("lllyasviel/sd-controlnet-canny", torch_dtype=torch.float16)
pipe = StableDiffusionControlNetPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5", controlnet=controlnet, torch_dtype=torch.float16
)
pipe.scheduler = UniPCMultistepScheduler.from_config(pipe.scheduler.config)

# This command loads the individual model components on GPU on-demand, so we don't
# need to explicitly call pipe.to("cuda").
pipe.enable_model_cpu_offload()

# Generator seed
generator = torch.manual_seed(0)
def get_canny_filter(image):
    # Convert the PIL image to a NumPy array before running edge detection.
    if not isinstance(image, np.ndarray):
        image = np.array(image)

    # Extract Canny edges and stack them into a 3-channel conditioning image.
    image = cv2.Canny(image, low_threshold, high_threshold)
    image = image[:, :, None]
    image = np.concatenate([image, image, image], axis=2)
    canny_image = Image.fromarray(image)
    return canny_image
def generate_images(image, prompt):
    canny_image = get_canny_filter(image)
    output = pipe(
        prompt,
        canny_image,
        generator=generator,
        num_images_per_prompt=3,
    )
    return output.images
gr.Interface(
    generate_images,
    inputs=[
        gr.Image(type="pil"),
        gr.Textbox(
            label="Enter your prompt",
            max_lines=1,
            placeholder="Sandra Oh, best quality, extremely detailed",
        ),
    ],
    outputs=gr.Gallery().style(grid=[2], height="auto"),
    title="Generate controlled outputs with ControlNet and Stable Diffusion.",
    description="This Space uses Canny edge maps as the additional conditioning.",
    examples=[["input_image_vermeer.png", "Sandra Oh, best quality, extremely detailed"]],
    allow_flagging=False,
).launch(enable_queue=True)
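
If you want to sanity-check the Space's core logic without launching the Gradio UI, you can call generate_images() directly. The sketch below is an assumption-laden example, not part of the Space itself: it reuses the functions defined above and the already imported load_image helper, assumes input_image_vermeer.png (the example image referenced in the interface) is available locally, and the output filenames are arbitrary.

# Minimal local test of the same Canny -> ControlNet path, without the UI.
init_image = load_image("input_image_vermeer.png")  # assumed to exist next to the script
images = generate_images(init_image, "Sandra Oh, best quality, extremely detailed")
for i, img in enumerate(images):
    img.save(f"controlnet_canny_output_{i}.png")  # arbitrary output names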