Fix device mismatch: load models on same device as GPU computation
app.py CHANGED

@@ -35,8 +35,8 @@ def load_models_and_components(model_name: str):
     """Load and cache models, tokenizer, vocoder, and feature extractor."""
     global _models_cache, _tokenizer_cache, _vocoder_cache, _feature_extractor_cache
 
-    # Set device (
-    device = torch.device("cpu")
+    # Set device (GPU if available for Spaces GPU acceleration)
+    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 
     if model_name not in _models_cache:
         print(f"Loading {model_name} model...")
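
The device mismatch this commit fixes typically surfaces when cached model weights stay on the CPU while input tensors are created on CUDA (or vice versa). Below is a minimal, self-contained sketch of the pattern the change relies on: pick the device once, move the model to it when caching, and create inputs on that same device. The function name load_model, the single cache dict, and the torch.nn.Linear placeholder are illustrative stand-ins, not the app's actual load_models_and_components objects.

import torch

_models_cache: dict[str, torch.nn.Module] = {}

def load_model(model_name: str) -> tuple[torch.nn.Module, torch.device]:
    """Load a model once and keep it on the device used for inference."""
    # Pick the device a single time; reuse it for both the model weights
    # and every input tensor so the two never end up on different devices.
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

    if model_name not in _models_cache:
        print(f"Loading {model_name} model...")
        model = torch.nn.Linear(16, 16)               # placeholder for the real model
        _models_cache[model_name] = model.to(device)  # move weights to the chosen device

    return _models_cache[model_name], device

model, device = load_model("demo")
inputs = torch.randn(1, 16, device=device)  # inputs created on the same device as the weights
outputs = model(inputs)                     # no CPU/CUDA mismatch at compute time

Returning the device alongside the cached model keeps the device decision in one place, so callers never have to re-derive it and risk disagreeing with where the weights actually live.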