Spaces:
Running
on
Zero
Running
on
Zero
Commit
·
134ecc0
1
Parent(s):
5f515e7
change examples, limit video length
Browse files
app.py
CHANGED
|
@@ -264,6 +264,31 @@ def init_video_session(GLOBAL_STATE: gr.State, video: str | dict) -> tuple[AppSt
|
|
| 264 |
if len(frames) == 0:
|
| 265 |
raise gr.Error("No frames could be loaded from the video.")
|
| 266 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 267 |
GLOBAL_STATE.video_frames = frames
|
| 268 |
# Try to capture original FPS if provided by loader
|
| 269 |
GLOBAL_STATE.video_fps = None
|
|
@@ -285,7 +310,8 @@ def init_video_session(GLOBAL_STATE: gr.State, video: str | dict) -> tuple[AppSt
|
|
| 285 |
first_frame = frames[0]
|
| 286 |
max_idx = len(frames) - 1
|
| 287 |
status = (
|
| 288 |
-
f"Loaded {len(frames)} frames @ {GLOBAL_STATE.video_fps or 'unknown'} fps. "
|
|
|
|
| 289 |
)
|
| 290 |
return GLOBAL_STATE, 0, max_idx, first_frame, status
|
| 291 |
|
|
@@ -650,10 +676,9 @@ with gr.Blocks(title="SAM2 Video (Transformers) - Interactive Segmentation", the
|
|
| 650 |
# (moved) Examples are defined above the render button
|
| 651 |
# Each example row must match the number of inputs (GLOBAL_STATE, video_in)
|
| 652 |
examples_list = [
|
| 653 |
-
[None, "./
|
| 654 |
-
[None, "./
|
| 655 |
-
[None, "./
|
| 656 |
-
[None, "./hurdles.mp4"],
|
| 657 |
]
|
| 658 |
with gr.Row():
|
| 659 |
gr.Examples(
|
|
|
|
| 264 |
if len(frames) == 0:
|
| 265 |
raise gr.Error("No frames could be loaded from the video.")
|
| 266 |
|
| 267 |
+
# Enforce max duration of 8 seconds (trim if longer)
|
| 268 |
+
MAX_SECONDS = 8.0
|
| 269 |
+
trimmed_note = ""
|
| 270 |
+
fps_in = None
|
| 271 |
+
if isinstance(info, dict) and info.get("fps"):
|
| 272 |
+
try:
|
| 273 |
+
fps_in = float(info["fps"]) or None
|
| 274 |
+
except Exception:
|
| 275 |
+
fps_in = None
|
| 276 |
+
if fps_in is not None:
|
| 277 |
+
max_frames_allowed = int(MAX_SECONDS * fps_in)
|
| 278 |
+
if len(frames) > max_frames_allowed:
|
| 279 |
+
frames = frames[:max_frames_allowed]
|
| 280 |
+
trimmed_note = f" (trimmed to {int(MAX_SECONDS)}s = {len(frames)} frames)"
|
| 281 |
+
if isinstance(info, dict):
|
| 282 |
+
info["num_frames"] = len(frames)
|
| 283 |
+
else:
|
| 284 |
+
# Fallback when FPS unknown: assume ~30 FPS and cap to 240 frames (~8s)
|
| 285 |
+
max_frames_allowed = 240
|
| 286 |
+
if len(frames) > max_frames_allowed:
|
| 287 |
+
frames = frames[:max_frames_allowed]
|
| 288 |
+
trimmed_note = " (trimmed to 240 frames ~8s @30fps)"
|
| 289 |
+
if isinstance(info, dict):
|
| 290 |
+
info["num_frames"] = len(frames)
|
| 291 |
+
|
| 292 |
GLOBAL_STATE.video_frames = frames
|
| 293 |
# Try to capture original FPS if provided by loader
|
| 294 |
GLOBAL_STATE.video_fps = None
|
|
|
|
| 310 |
first_frame = frames[0]
|
| 311 |
max_idx = len(frames) - 1
|
| 312 |
status = (
|
| 313 |
+
f"Loaded {len(frames)} frames @ {GLOBAL_STATE.video_fps or 'unknown'} fps{trimmed_note}. "
|
| 314 |
+
f"Device: {device}, dtype: bfloat16"
|
| 315 |
)
|
| 316 |
return GLOBAL_STATE, 0, max_idx, first_frame, status
|
| 317 |
|
|
|
|
| 676 |
# (moved) Examples are defined above the render button
|
| 677 |
# Each example row must match the number of inputs (GLOBAL_STATE, video_in)
|
| 678 |
examples_list = [
|
| 679 |
+
[None, "./deers.mp4"],
|
| 680 |
+
[None, "./penguins.mp4"],
|
| 681 |
+
[None, "./foot.mp4"],
|
|
|
|
| 682 |
]
|
| 683 |
with gr.Row():
|
| 684 |
gr.Examples(
|