Spaces: Running on Zero
xiaoyuxi committed · Commit b6d15ed · 1 Parent(s): 488d6c3
backend
app.py CHANGED
@@ -386,36 +386,22 @@ def update_tracker_model(model_name):
 def get_video_settings(video_name):
     """Get video-specific settings based on video name"""
     video_settings = {
-        "  … (16 removed entries truncated in the page extraction)
-        "india": (50, 756, 3),
-        "judo": (50, 756, 3),
-        "kite-surf": (50, 756, 3),
-        "lab-coat": (50, 756, 3),
-        "libby": (50, 756, 3),
-        "loading": (50, 756, 3),
-        "mbike-trick": (50, 756, 3),
-        "motocross-jump": (50, 756, 3),
-        "paragliding-launch": (50, 756, 3),
-        "parkour": (50, 756, 3),
-        "pigs": (50, 756, 3),
-        "scooter-black": (50, 756, 3),
-        "shooting": (50, 756, 3),
-        "soapbox": (50, 756, 3)
+        "kiss": (45, 700, 10),
+        "backpack": (40, 600, 2),
+        "kitchen": (60, 800, 3),
+        "pillow": (35, 500, 2),
+        "hockey": (45, 700, 2),
+        "drifting": (35, 1000, 6),
+        "ball": (45, 256, 6),
+        "ken_block_0": (45, 700, 2),
+        "ego_kc1": (45, 500, 4),
+        "vertical_place": (45, 500, 3),
+        "ego_teaser": (45, 1200, 10),
+        "robot_unitree": (45, 500, 4),
+        "droid_robot": (35, 400, 5),
+        "robot_2": (45, 256, 5),
+        "cinema_0": (45, 356, 5),
+        "cinema_1": (45, 756, 3),
     }
 
     return video_settings.get(video_name, (50, 756, 3))
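Each tuple reads as (grid_size, vo_points, fps), and the fallback (50, 756, 3) matches the slider defaults further down in this diff, so callers can unpack it directly. A minimal sketch of that lookup; defaults_for and the example paths are illustrative, not code from this commit:

import os

def defaults_for(video_path: str) -> tuple[int, int, int]:
    # Reuses get_video_settings() from the hunk above; keys are bare file names.
    name = os.path.splitext(os.path.basename(video_path))[0]
    grid_size, vo_points, fps = get_video_settings(name)  # unknown names fall back to (50, 756, 3)
    return grid_size, vo_points, fps

print(defaults_for("examples/kitchen.mp4"))   # (60, 800, 3)
print(defaults_for("examples/new_clip.mp4"))  # (50, 756, 3), the fallback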
@@ -508,7 +494,7 @@ with gr.Blocks(
     Welcome to SpatialTracker V2! This interface allows you to track objects in videos using advanced computer vision techniques.
 
     **Instructions:**
-    1. Upload a video file
+    1. Upload a video file or select from examples below
     2. Click on the object you want to track in the first frame
     3. Adjust tracking parameters if needed
     4. Click "Launch Visualization" to start tracking
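Step 2 of these instructions depends on capturing clicks on the displayed first frame. A minimal, self-contained sketch of how a Gradio select event can accumulate clicked coordinates into state; the on_select handler and component names are assumptions, since app.py's actual handler is outside this diff:

import gradio as gr

def on_select(points, evt: gr.SelectData):
    # evt.index holds the (x, y) pixel position clicked on the image.
    return points + [tuple(evt.index)]

with gr.Blocks() as demo:
    frame = gr.Image(label="First frame")
    selected_points = gr.State([])
    frame.select(on_select, inputs=selected_points, outputs=selected_points)

if __name__ == "__main__":
    demo.launch()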
@@ -519,16 +505,42 @@ with gr.Blocks(
     status_text = "🟢 Backend Connected" if BACKEND_AVAILABLE else "🟡 Running in Standalone Mode"
     gr.Markdown(f"**Status:** {status_text}")
 
+    # Example videos section - moved to top
+    with gr.Group():
+        gr.Markdown("### 📂 Example Videos")
+        gr.Markdown("Try these example videos to get started quickly:")
+
+        # Define video_input here so it can be referenced in examples
+        video_input = gr.Video(
+            label="Upload Video or Select Example",
+            format="mp4"
+        )
+
+        gr.Examples(
+            examples=[
+                ["examples/kiss.mp4"],
+                ["examples/backpack.mp4"],
+                ["examples/kitchen.mp4"],
+                ["examples/pillow.mp4"],
+                ["examples/hockey.mp4"],
+                ["examples/drifting.mp4"],
+                ["examples/ball.mp4"],
+                ["examples/ken_block_0.mp4"],
+                ["examples/ego_kc1.mp4"],
+                ["examples/vertical_place.mp4"],
+                ["examples/ego_teaser.mp4"],
+                ["examples/robot_unitree.mp4"],
+                ["examples/droid_robot.mp4"],
+                ["examples/robot_2.mp4"],
+                ["examples/cinema_0.mp4"],
+                ["examples/cinema_1.mp4"],
+            ],
+            inputs=video_input,
+            label="Click on any example to load it"
+        )
+
     with gr.Row():
         with gr.Column(scale=1):
-            # Video upload section
-            with gr.Group():
-                gr.Markdown("### 📹 Video Upload")
-                video_input = gr.Video(
-                    label="Upload Video",
-                    format="mp4"
-                )
-
             # Interactive frame display
             with gr.Group():
                 gr.Markdown("### 🎯 Point Selection")
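gr.Examples needs the component it populates to exist first, which is why video_input is now created inside the examples group before gr.Examples is declared. A stripped-down, self-contained sketch of the same ordering; load_video and the two example paths are placeholders, not code from app.py:

import gradio as gr

def load_video(video_path):
    # In app.py this is roughly where the first frame would be extracted for point selection.
    return f"Loaded: {video_path}"

with gr.Blocks() as demo:
    # The input component must exist before gr.Examples can reference it.
    video_input = gr.Video(label="Upload Video or Select Example", format="mp4")
    gr.Examples(
        examples=[["examples/kiss.mp4"], ["examples/kitchen.mp4"]],  # paths assumed to exist locally
        inputs=video_input,   # clicking an example fills this component
    )
    status = gr.Textbox(label="Status")
    video_input.change(load_video, inputs=video_input, outputs=status)

if __name__ == "__main__":
    demo.launch()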
@@ -569,8 +581,9 @@ with gr.Blocks(
                 value="<p>Upload a video and select points to see 3D visualization here.</p>"
             )
 
-            # Advanced settings section
-            with gr.Accordion("⚙️ Advanced Settings", open=False):
+            # Advanced settings section - changed to open=True
+            with gr.Accordion("⚙️ Advanced Settings", open=True):
+                gr.Markdown("Adjust these parameters to optimize tracking performance:")
                 with gr.Row():
                     grid_size = gr.Slider(
                         minimum=10,
@@ -578,7 +591,7 @@ with gr.Blocks(
                         step=10,
                         value=50,
                         label="Grid Size",
-                        info="Size of the tracking grid"
+                        info="Size of the tracking grid (larger = more detailed)"
                     )
                     vo_points = gr.Slider(
                         minimum=100,
@@ -586,7 +599,7 @@ with gr.Blocks(
                         step=50,
                         value=756,
                         label="VO Points",
-                        info="Number of visual odometry points"
+                        info="Number of visual odometry points (more = better accuracy)"
                     )
                     fps = gr.Slider(
                         minimum=1,
@@ -594,27 +607,13 @@ with gr.Blocks(
                         step=1,
                         value=3,
                         label="FPS",
-                        info="Frames per second for processing"
+                        info="Frames per second for processing (higher = smoother but slower)"
                     )
 
             # Launch button
             with gr.Row():
                 launch_btn = gr.Button("🚀 Launch Visualization", variant="primary", size="lg")
 
-            # Example videos section
-            with gr.Accordion("📂 Example Videos", open=False):
-                gr.Examples(
-                    examples=[
-                        ["examples/blackswan.mp4"],
-                        ["examples/bike-packing.mp4"],
-                        ["examples/bmx-trees.mp4"],
-                        ["examples/breakdance.mp4"],
-                        ["examples/camel.mp4"],
-                    ],
-                    inputs=video_input,
-                    label="Try these example videos"
-                )
-
             # Hidden state variables
             original_image_state = gr.State(None)
             selected_points = gr.State([])
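Downstream, the launch button typically hands the uploaded video, the accumulated points, and the three slider values to one callback. A self-contained sketch of that wiring; run_tracking, the slider maxima, and the HTML output are stand-ins for parts of app.py that are not in this diff:

import gradio as gr

def run_tracking(video_path, points, grid_size, vo_points, fps):
    # Placeholder for the real backend call; slider values arrive as plain numbers.
    return (f"<p>Tracking {video_path}: {len(points)} point(s), "
            f"grid={grid_size}, vo_points={vo_points}, fps={fps}</p>")

with gr.Blocks() as demo:
    video_input = gr.Video(label="Upload Video", format="mp4")
    selected_points = gr.State([])                                             # filled by the point-selection handler
    grid_size = gr.Slider(10, 100, value=50, step=10, label="Grid Size")       # maximum assumed
    vo_points = gr.Slider(100, 2000, value=756, step=50, label="VO Points")    # maximum assumed
    fps = gr.Slider(1, 30, value=3, step=1, label="FPS")                       # maximum assumed
    launch_btn = gr.Button("🚀 Launch Visualization", variant="primary", size="lg")
    viz_html = gr.HTML()

    launch_btn.click(
        run_tracking,
        inputs=[video_input, selected_points, grid_size, vo_points, fps],
        outputs=viz_html,
    )

if __name__ == "__main__":
    demo.launch()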