Spaces:
				
			
			
	
			
			
		Runtime error
		
	
	
	
			
			
	
	
	
	
		
		
		Runtime error
		
	Upload 4 files
Browse files

Files changed:
- README.md +1 -1
 - app.py +1 -1
 - multit2i.py +1 -1
 - requirements.txt +2 -1
 
    	
        README.md
    CHANGED
    
    | 
         @@ -4,7 +4,7 @@ emoji: ππ 
     | 
|
| 4 | 
         
             
            colorFrom: blue
         
     | 
| 5 | 
         
             
            colorTo: purple
         
     | 
| 6 | 
         
             
            sdk: gradio
         
     | 
| 7 | 
         
            -
            sdk_version: 5. 
     | 
| 8 | 
         
             
            app_file: app.py
         
     | 
| 9 | 
         
             
            short_description: Text-to-Image
         
     | 
| 10 | 
         
             
            license: mit
         
     | 
| 
         | 
|
| 4 | 
         
             
            colorFrom: blue
         
     | 
| 5 | 
         
             
            colorTo: purple
         
     | 
| 6 | 
         
             
            sdk: gradio
         
     | 
| 7 | 
         
            +
            sdk_version: 5.9.1
         
     | 
| 8 | 
         
             
            app_file: app.py
         
     | 
| 9 | 
         
             
            short_description: Text-to-Image
         
     | 
| 10 | 
         
             
            license: mit
         
     | 
    	
        app.py
    CHANGED
    
    | 
         @@ -182,4 +182,4 @@ with gr.Blocks(theme="NoCrypt/miku@>=1.2.2", fill_width=True, css=css) as demo: 
     | 
|
| 182 | 
         
             
                ).success(compose_prompt_to_copy, [v2_character, v2_series, prompt], [prompt], queue=False, show_api=False)
         
     | 
| 183 | 
         | 
| 184 | 
         
             
            #demo.queue(default_concurrency_limit=200, max_size=200)
         
     | 
| 185 | 
         
            -
            demo.launch(max_threads=400)
         
     | 
| 
         | 
|
| 182 | 
         
             
                ).success(compose_prompt_to_copy, [v2_character, v2_series, prompt], [prompt], queue=False, show_api=False)
         
     | 
| 183 | 
         | 
| 184 | 
         
             
            #demo.queue(default_concurrency_limit=200, max_size=200)
         
     | 
| 185 | 
         
            +
            demo.launch(max_threads=400, ssr_mode=False)
         
     | 
    	
        multit2i.py
    CHANGED
    
    | 
         @@ -69,7 +69,7 @@ def find_model_list(author: str="", tags: list[str]=[], not_tag="", sort: str="l 
     | 
|
| 69 | 
         
             
                for model in model_infos:
         
     | 
| 70 | 
         
             
                    if not model.private and not model.gated or HF_TOKEN is not None:
         
     | 
| 71 | 
         
             
                       loadable = is_loadable(model.id, force_gpu) if check_status else True
         
     | 
| 72 | 
         
            -
                       if not_tag and not_tag in model.tags or not loadable: continue
         
     | 
| 73 | 
         
             
                       models.append(model.id)
         
     | 
| 74 | 
         
             
                       if len(models) == limit: break
         
     | 
| 75 | 
         
             
                return models
         
     | 
| 
         | 
|
| 69 | 
         
             
                for model in model_infos:
         
     | 
| 70 | 
         
             
                    if not model.private and not model.gated or HF_TOKEN is not None:
         
     | 
| 71 | 
         
             
                       loadable = is_loadable(model.id, force_gpu) if check_status else True
         
     | 
| 72 | 
         
            +
                       if not_tag and not_tag in model.tags or not loadable or "not-for-all-audiences" in model.tags: continue
         
     | 
| 73 | 
         
             
                       models.append(model.id)
         
     | 
| 74 | 
         
             
                       if len(models) == limit: break
         
     | 
| 75 | 
         
             
                return models
         
     | 
    	
        requirements.txt
    CHANGED
    
    | 
         @@ -6,4 +6,5 @@ transformers==4.44.0 
     | 
|
| 6 | 
         
             
            optimum[onnxruntime]
         
     | 
| 7 | 
         
             
            dartrs
         
     | 
| 8 | 
         
             
            translatepy
         
     | 
| 9 | 
         
            -
            timm
         
     | 
| 
         | 
| 
         | 
|
| 6 | 
         
             
            optimum[onnxruntime]
         
     | 
| 7 | 
         
             
            dartrs
         
     | 
| 8 | 
         
             
            translatepy
         
     | 
| 9 | 
         
            +
            timm
         
     | 
| 10 | 
         
            +
            numpy<2
         
     |