Update app.py
app.py CHANGED
@@ -242,15 +242,6 @@ MAX_SEED = 1e7
 title = "## AI 3D Model Generator"
 description = "Our Image-to-3D Generator transforms your 2D photos into stunning, AI generated 3D models—ready for games, AR/VR, or 3D printing. Our AI 3D Modeling is based on Hunyuan 2.0. Check more in [imgto3d.ai](https://www.imgto3d.ai)."

-# Top-level sentinel function, used for the HF scan
-@spaces.GPU(duration=1)
-def _gpu_check():
-    import torch
-    if torch.cuda.is_available():
-        return f"GPU ready: {torch.cuda.get_device_name(0)}"
-    else:
-        return "No GPU available"
-
 with gr.Blocks().queue() as demo:
     gr.Markdown(title)
     gr.Markdown(description)
@@ -297,18 +288,19 @@ with gr.Blocks().queue() as demo:
         outputs=[html_export_mesh,file_export, glbPath_output, objPath_output]
     )

-
-#
-
-
-
-static_dir
-
-
-
-
-
-
-
-
+if __name__ == "__main__":
+    # https://discuss.huggingface.co/t/how-to-serve-an-html-file/33921/2
+    # create a FastAPI app
+    app = FastAPI()
+    # create a static directory to store the static files
+    static_dir = Path(SAVE_DIR).absolute()
+    static_dir.mkdir(parents=True, exist_ok=True)
+    app.mount("/static", StaticFiles(directory=static_dir, html=True), name="static")
+    shutil.copytree('./assets/env_maps', os.path.join(static_dir, 'env_maps'), dirs_exist_ok=True)
+
+    if args.low_vram_mode:
+        torch.cuda.empty_cache()
+
+    app = gr.mount_gradio_app(app, demo, path="/")
+    # demo.launch()
+    uvicorn.run(app, host=args.host, port=args.port)
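
For reference, below is a minimal, self-contained sketch of the FastAPI + Gradio serving pattern the new __main__ block follows: mount a static directory for generated files, mount the Gradio Blocks app at the web root, and run everything under one uvicorn server. The output directory, host, and port here are hypothetical placeholders standing in for SAVE_DIR, args.host, and args.port from the full app, and the Blocks UI is reduced to a stub; this is not the app's exact code.

from pathlib import Path

import gradio as gr
import uvicorn
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles

# Stand-in for the full Blocks demo defined in app.py.
with gr.Blocks() as demo:
    gr.Markdown("## AI 3D Model Generator")

if __name__ == "__main__":
    app = FastAPI()

    # Serve generated files (e.g. GLB/OBJ exports) as static assets under /static.
    static_dir = Path("outputs").absolute()  # placeholder for SAVE_DIR
    static_dir.mkdir(parents=True, exist_ok=True)
    app.mount("/static", StaticFiles(directory=static_dir, html=True), name="static")

    # Mount the Gradio UI at the web root and serve both through one uvicorn process.
    app = gr.mount_gradio_app(app, demo, path="/")
    uvicorn.run(app, host="0.0.0.0", port=7860)  # placeholders for args.host / args.port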