Commit 4b20d73 · Parent(s): 5b71c3a

init

- app.py +4 -5
- models_server.py +8 -11

app.py CHANGED

@@ -98,28 +98,27 @@ def bot(messages):
 with gr.Blocks() as demo:
     gr.Markdown("<h1><center>HuggingGPT</center></h1>")
     gr.Markdown("<p align='center'><img src='https://i.ibb.co/qNH3Jym/logo.png' height='25' width='95'></p>")
-
     gr.Markdown("<p align='center' style='font-size: 20px;'>A system to connect LLMs with ML community. See our <a href='https://github.com/microsoft/JARVIS'>Project</a> and <a href='http://arxiv.org/abs/2303.17580'>Paper</a>.</p>")
-    with gr.Row().style(
+    with gr.Row().style():
         with gr.Column(scale=0.85):
             openai_api_key = gr.Textbox(
                 show_label=False,
                 placeholder="Set your OpenAI API key here and press Enter",
                 lines=1,
                 type="password",
-            )
+            ).style(container=False)
         with gr.Column(scale=0.15, min_width=0):
             btn1 = gr.Button("Submit").style(full_height=True)
 
     chatbot = gr.Chatbot([], elem_id="chatbot").style(height=500)
 
-    with gr.Row().style(
+    with gr.Row().style():
         with gr.Column(scale=0.85):
             txt = gr.Textbox(
                 show_label=False,
                 placeholder="Enter text and press enter. The url of the multimedia resource must contain the extension name.",
                 lines=1,
-            )
+            ).style(container=False)
         with gr.Column(scale=0.15, min_width=0):
             btn2 = gr.Button("Send").style(full_height=True)
 
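In app.py, the commit removes a stray blank line, replaces the argument-taking .style(...) calls on gr.Row with bare .style(), and chains .style(container=False) onto the two Textboxes so they render without their surrounding box. Below is a minimal sketch of the resulting row layout, assuming Gradio 3.x (where Blocks components still expose .style()); the placeholder text and the launch() call are illustrative, not part of the commit.

    # Minimal sketch of the layout pattern above. Assumes Gradio 3.x, where
    # components still expose .style(); placeholder text and launch() are illustrative.
    import gradio as gr

    with gr.Blocks() as demo:
        chatbot = gr.Chatbot([], elem_id="chatbot").style(height=500)
        with gr.Row():
            with gr.Column(scale=0.85):
                # container=False drops the wrapper box so the textbox sits flush in the row
                txt = gr.Textbox(show_label=False, placeholder="Enter text and press enter", lines=1).style(container=False)
            with gr.Column(scale=0.15, min_width=0):
                btn = gr.Button("Send")

    demo.launch()

Keeping the button in a narrow min_width=0 column holds it on the same line as the textbox instead of letting it wrap below.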
    	
models_server.py CHANGED

@@ -67,8 +67,10 @@ if config["proxy"]:
 
 start = time.time()
 
+# local_models = "models/"
 local_models = ""
 
+
 def load_pipes(local_deployment):
     other_pipes = {}
     standard_pipes = {}
@@ -266,29 +268,24 @@ def load_pipes(local_deployment):
             f"{local_models}runwayml/stable-diffusion-v1-5", controlnet=controlnet, torch_dtype=torch.float16
         )
 
-        def mlsd_control_network():
-            model = MobileV2_MLSD_Large()
-            model.load_state_dict(torch.load(f"{local_models}lllyasviel/ControlNet/annotator/ckpts/mlsd_large_512_fp32.pth"), strict=True)
-            return MLSDdetector(model)
-
 
-        hed_network =
+        hed_network = HEDdetector.from_pretrained('lllyasviel/ControlNet')
 
         controlnet_sd_pipes = {
             "openpose-control": {
-                "model": OpenposeDetector(
+                "model": OpenposeDetector.from_pretrained('lllyasviel/ControlNet')
             },
             "mlsd-control": {
-                "model":
+                "model": MLSDdetector.from_pretrained('lllyasviel/ControlNet')
             },
             "hed-control": {
-                "model":
+                "model": hed_network
             },
             "scribble-control": {
-                "model":
+                "model": hed_network
             },
             "midas-control": {
-                "model": MidasDetector(
+                "model": MidasDetector.from_pretrained('lllyasviel/ControlNet')
             },
             "canny-control": {
                 "model": CannyDetector()
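In models_server.py, the hand-rolled annotator setup (including the deleted mlsd_control_network() helper, which loaded annotator/ckpts/mlsd_large_512_fp32.pth from a local lllyasviel/ControlNet checkout) is replaced with from_pretrained() constructors, so annotator weights are fetched from the Hugging Face Hub; the commented-out local_models = "models/" line keeps the old local-prefix option visible. The diff also builds hed_network once and reuses it for both the hed-control and scribble-control pipes. Below is a small sketch of that loading pattern, assuming the detector classes come from the controlnet_aux package (the import block is not shown in the diff) and that 'example.jpg' is a stand-in input image.

    # Sketch of the from_pretrained() loading pattern used above. Assumes the
    # detectors come from the controlnet_aux package; 'example.jpg' is hypothetical.
    from controlnet_aux import HEDdetector, MLSDdetector, OpenposeDetector
    from PIL import Image

    # Weights are downloaded from the Hub instead of read from local ckpt files.
    hed = HEDdetector.from_pretrained('lllyasviel/ControlNet')
    mlsd = MLSDdetector.from_pretrained('lllyasviel/ControlNet')
    openpose = OpenposeDetector.from_pretrained('lllyasviel/ControlNet')

    image = Image.open('example.jpg')
    hint = hed(image)   # detectors are callable and return the control image
    hint.save('hed_hint.png')

Reusing the single HEDdetector instance for scribble control avoids loading the same network twice, which matters when several ControlNet pipes are kept resident at once.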
