HairStable Bot
committed on
Commit
·
82b3d02
1
Parent(s):
786fe18
fix(cpu): force DDIM in get_bald; add /health/gpu diagnostics
Browse files
Hair_stable_new_fresh/infer_full.py
CHANGED
|
@@ -170,6 +170,12 @@ class StableHair:
|
|
| 170 |
def get_bald(self, id_image, scale):
|
| 171 |
H, W = id_image.size
|
| 172 |
scale = float(scale)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 173 |
image = self.remove_hair_pipeline(
|
| 174 |
prompt="",
|
| 175 |
negative_prompt="",
|
|
|
|
| 170 |
def get_bald(self, id_image, scale):
|
| 171 |
H, W = id_image.size
|
| 172 |
scale = float(scale)
|
| 173 |
+
# Enforce a CPU/GPU-safe scheduler every call in case upstream resets it
|
| 174 |
+
try:
|
| 175 |
+
from diffusers import DDIMScheduler as _DDIM
|
| 176 |
+
self.remove_hair_pipeline.scheduler = _DDIM.from_config(self.remove_hair_pipeline.scheduler.config)
|
| 177 |
+
except Exception:
|
| 178 |
+
pass
|
| 179 |
image = self.remove_hair_pipeline(
|
| 180 |
prompt="",
|
| 181 |
negative_prompt="",
|
Hair_stable_new_fresh/server.py
CHANGED
|
@@ -56,6 +56,21 @@ def root():
|
|
| 56 |
return {"status": "ok"}
|
| 57 |
|
| 58 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 59 |
class HairSwapRequest(BaseModel):
|
| 60 |
source_id: str
|
| 61 |
reference_id: str
|
|
@@ -169,11 +184,11 @@ def get_hairswap(req: HairSwapRequest, _=Depends(verify_bearer)):
|
|
| 169 |
source_image=source_path,
|
| 170 |
reference_image=reference_path,
|
| 171 |
random_seed=-1,
|
| 172 |
-
step=
|
| 173 |
guidance_scale=req.guidance_scale,
|
| 174 |
scale=req.scale,
|
| 175 |
controlnet_conditioning_scale=req.controlnet_conditioning_scale,
|
| 176 |
-
size=
|
| 177 |
)
|
| 178 |
LOGGER.info("Hair transfer completed successfully")
|
| 179 |
except Exception as e:
|
|
|
|
| 56 |
return {"status": "ok"}
|
| 57 |
|
| 58 |
|
| 59 |
+
@app.get("/health/gpu")
def health_gpu():
    """Diagnostic endpoint reporting torch/CUDA availability.

    Returns a JSON object with CUDA availability, device count/name, and
    torch version info; on any failure returns ``{"error": ...}`` instead
    of raising, so the health probe itself can never 500.
    """
    try:
        # Imported lazily so the endpoint still responds (with an error
        # payload) even if torch is missing or broken in this environment.
        import torch as _t

        # Hoisted: the original queried cuda.is_available() three times per call.
        cuda_ok = bool(_t.cuda.is_available())
        return {
            "cuda_available": cuda_ok,
            "cuda_device_count": int(_t.cuda.device_count()) if cuda_ok else 0,
            "cuda_device_name": _t.cuda.get_device_name(0) if cuda_ok else None,
            "torch_version": getattr(_t, "__version__", None),
            # torch.version.cuda is None on CPU-only builds.
            "torch_cuda_version": getattr(_t.version, "cuda", None),
        }
    except Exception as e:
        # Best-effort diagnostics: report the failure rather than crash the probe.
        return {"error": str(e)}
|
| 72 |
+
|
| 73 |
+
|
| 74 |
class HairSwapRequest(BaseModel):
|
| 75 |
source_id: str
|
| 76 |
reference_id: str
|
|
|
|
| 184 |
source_image=source_path,
|
| 185 |
reference_image=reference_path,
|
| 186 |
random_seed=-1,
|
| 187 |
+
step=20,
|
| 188 |
guidance_scale=req.guidance_scale,
|
| 189 |
scale=req.scale,
|
| 190 |
controlnet_conditioning_scale=req.controlnet_conditioning_scale,
|
| 191 |
+
size=448,
|
| 192 |
)
|
| 193 |
LOGGER.info("Hair transfer completed successfully")
|
| 194 |
except Exception as e:
|