modify fallback model
app.py CHANGED
@@ -79,14 +79,19 @@ MODEL_CONFIG = {
         "dtype": torch.float16 if torch.cuda.is_available() else torch.float32,
         "is_t5": True
     },
+    "Fallback Model": {
+        "name": "TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T",
+        "description": "Model sangat ringan untuk fallback",
+        "dtype": torch.float16 if torch.cuda.is_available() else torch.float32
+    }
 }
 
 # Add the fallback model to MODEL_CONFIG
-MODEL_CONFIG["Fallback Model"] = {
-    "name": "TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T",
-    "description": "Model sangat ringan untuk fallback",
-    "dtype": torch.float16 if torch.cuda.is_available() else torch.float32
-}
+# MODEL_CONFIG["Fallback Model"] = {
+#     "name": "TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T",
+#     "description": "Model sangat ringan untuk fallback",
+#     "dtype": torch.float16 if torch.cuda.is_available() else torch.float32
+# }
 
 def initialize_model_once(model_key):
     with MODEL_CACHE["init_lock"]:
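For context on how an entry like this is typically consumed, here is a minimal sketch of an initialize_model_once that retries with the "Fallback Model" config when the requested model fails to load. Only MODEL_CONFIG, the function name, and MODEL_CACHE["init_lock"] appear in the diff above; the "models" sub-dict, the transformers Auto* loaders, and the fallback loop are assumptions for illustration, not the Space's actual implementation.

import threading
from transformers import AutoModelForCausalLM, AutoModelForSeq2SeqLM, AutoTokenizer

# Assumed cache layout; only "init_lock" is visible in the diff above.
MODEL_CACHE = {"init_lock": threading.Lock(), "models": {}}

def initialize_model_once(model_key):
    """Hypothetical sketch: load the requested model once, falling back to
    the lightweight "Fallback Model" entry if the primary load fails."""
    with MODEL_CACHE["init_lock"]:
        if model_key in MODEL_CACHE["models"]:
            return MODEL_CACHE["models"][model_key]  # already initialized
        for key in (model_key, "Fallback Model"):
            cfg = MODEL_CONFIG[key]  # MODEL_CONFIG as defined in the diff above
            # The "is_t5" flag in the diff suggests seq2seq models need a different loader.
            model_cls = AutoModelForSeq2SeqLM if cfg.get("is_t5") else AutoModelForCausalLM
            try:
                tokenizer = AutoTokenizer.from_pretrained(cfg["name"])
                model = model_cls.from_pretrained(cfg["name"], torch_dtype=cfg["dtype"])
                MODEL_CACHE["models"][model_key] = (model, tokenizer)
                return MODEL_CACHE["models"][model_key]
            except Exception as err:
                print(f"Failed to load {cfg['name']}: {err}")
        raise RuntimeError(f"Could not load '{model_key}' or the fallback model")

Note that defining "Fallback Model" inside MODEL_CONFIG (as this commit does) and assigning it to the dict afterwards are functionally equivalent; the inline form simply keeps all model entries in one place.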