Spaces: Running on Zero
Y Phung Nguyen committed · c5ac360 · 1 Parent(s): 22b7790
Upd models loader
ui.py
CHANGED
@@ -328,10 +328,18 @@ def create_demo():
             status_lines.append(f"❌ MedSwin ({model_name}): error loading")
         else:
             # Use GPU-decorated function to load the model
-
-
-
-
+            try:
+                result = load_model_with_gpu(model_name)
+                if result and isinstance(result, tuple) and len(result) == 2:
+                    status_text, is_ready = result
+                    if is_ready:
+                        status_lines.append(f"✅ MedSwin ({model_name}): loaded and ready")
+                    else:
+                        status_lines.append(f"⏳ MedSwin ({model_name}): loading...")
+                else:
+                    status_lines.append(f"⏳ MedSwin ({model_name}): loading...")
+            except Exception as e:
+                logger.error(f"Error calling load_model_with_gpu: {e}")
                 status_lines.append(f"⏳ MedSwin ({model_name}): loading...")

         # TTS model status
@@ -346,15 +354,15 @@ def create_demo():
         # ASR (Whisper) model status
         if WHISPER_AVAILABLE:
             if config.global_whisper_model is not None:
-                status_lines.append("✅ ASR (Whisper
+                status_lines.append("✅ ASR (Whisper): loaded and ready")
             else:
-                status_lines.append("⚠️ ASR (Whisper
+                status_lines.append("⚠️ ASR (Whisper): not loaded")
         else:
             status_lines.append("❌ ASR: library not available")
-
-
-
-
+
+        status_text = "\n".join(status_lines)
+        is_ready = is_model_loaded(model_name)
+        return status_text, is_ready
     except Exception as e:
         return f"❌ Error: {str(e)[:100]}", False

@@ -452,23 +460,57 @@ def create_demo():

     # Initialize status on load
     def init_model_status():
-
-
+        try:
+            result = check_model_status(DEFAULT_MEDICAL_MODEL)
+            if result and isinstance(result, tuple) and len(result) == 2:
+                status_text, is_ready = result
+                return status_text
+            else:
+                return "⚠️ Unable to check model status"
+        except Exception as e:
+            logger.error(f"Error in init_model_status: {e}")
+            return f"⚠️ Error: {str(e)[:100]}"

     # Update status when model selection changes
     def update_model_status_on_change(model_name):
-
-
+        try:
+            result = check_model_status(model_name)
+            if result and isinstance(result, tuple) and len(result) == 2:
+                status_text, is_ready = result
+                return status_text
+            else:
+                return "⚠️ Unable to check model status"
+        except Exception as e:
+            logger.error(f"Error in update_model_status_on_change: {e}")
+            return f"⚠️ Error: {str(e)[:100]}"

     # Handle model selection change
     def on_model_change(model_name):
-
-
-
-
-
-
-
+        try:
+            result = load_model_and_update_status(model_name)
+            if result and isinstance(result, tuple) and len(result) == 2:
+                status_text, is_ready = result
+                submit_enabled = is_ready
+                return (
+                    status_text,
+                    gr.update(interactive=submit_enabled),
+                    gr.update(interactive=submit_enabled)
+                )
+            else:
+                error_msg = "⚠️ Unable to load model status"
+                return (
+                    error_msg,
+                    gr.update(interactive=False),
+                    gr.update(interactive=False)
+                )
+        except Exception as e:
+            logger.error(f"Error in on_model_change: {e}")
+            error_msg = f"⚠️ Error: {str(e)[:100]}"
+            return (
+                error_msg,
+                gr.update(interactive=False),
+                gr.update(interactive=False)
+            )

     # Update status display periodically or on model status changes
     def refresh_model_status(model_name):
@@ -494,8 +536,19 @@ def create_demo():
         outputs=None
     )
     # Finally update status to show all models
+    def update_status_after_load():
+        try:
+            result = check_model_status(DEFAULT_MEDICAL_MODEL)
+            if result and isinstance(result, tuple) and len(result) == 2:
+                return result[0]
+            else:
+                return "⚠️ Unable to check model status"
+        except Exception as e:
+            logger.error(f"Error updating status after load: {e}")
+            return f"⚠️ Error: {str(e)[:100]}"
+
     demo.load(
-        fn=
+        fn=update_status_after_load,
         inputs=None,
         outputs=[model_status]
     )
@@ -511,9 +564,23 @@ def create_demo():
         # Check if model is loaded
         if not is_model_loaded(medical_model_name):
             # Try to load it
-
-
-
+            try:
+                result = load_model_and_update_status(medical_model_name)
+                if result and isinstance(result, tuple) and len(result) == 2:
+                    status_text, is_ready = result
+                    if not is_ready:
+                        error_msg = "⚠️ Model is not ready. Please wait for the model to finish loading before sending messages."
+                        updated_history = history + [{"role": "assistant", "content": error_msg}]
+                        yield updated_history, ""
+                        return
+                else:
+                    error_msg = "⚠️ Unable to check model status. Please try again."
+                    updated_history = history + [{"role": "assistant", "content": error_msg}]
+                    yield updated_history, ""
+                    return
+            except Exception as e:
+                logger.error(f"Error loading model: {e}")
+                error_msg = f"⚠️ Error loading model: {str(e)[:200]}. Please try again."
                 updated_history = history + [{"role": "assistant", "content": error_msg}]
                 yield updated_history, ""
                 return
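For orientation, below is a minimal, self-contained sketch of how the new handlers could be wired to Gradio components inside create_demo(). It is not part of this commit: the component names (model_dropdown, model_status, submit_btn, audio_btn), the dropdown choices, and the stub handler bodies are assumptions for illustration only; the real init_model_status and on_model_change are the versions defined in the diff above.

import gradio as gr

# Stand-in stubs for the handlers defined in ui.py's create_demo();
# the real implementations are shown in the diff above.
def init_model_status():
    return "⏳ MedSwin: loading..."

def on_model_change(model_name):
    is_ready = False  # stand-in; the real handler calls load_model_and_update_status()
    return (
        f"⏳ MedSwin ({model_name}): loading...",
        gr.update(interactive=is_ready),
        gr.update(interactive=is_ready),
    )

with gr.Blocks() as demo:
    model_status = gr.Markdown("⏳ Checking model status...")
    model_dropdown = gr.Dropdown(
        choices=["MedSwin-base", "MedSwin-large"],  # hypothetical model list
        label="Medical model",
    )
    submit_btn = gr.Button("Send", interactive=False)
    audio_btn = gr.Button("Speak", interactive=False)

    # Populate the status box once the UI loads, mirroring demo.load(...) in the diff.
    demo.load(fn=init_model_status, inputs=None, outputs=[model_status])

    # When the selected model changes, refresh the status text and
    # enable/disable the submit controls from the returned gr.update() values.
    model_dropdown.change(
        fn=on_model_change,
        inputs=[model_dropdown],
        outputs=[model_status, submit_btn, audio_btn],
    )

if __name__ == "__main__":
    demo.launch()

The point mirrored here is that on_model_change returns a status string plus two gr.update(interactive=...) values, so a single handler both reports loading progress and gates the submit controls until the selected model is ready.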