add model_info and replace getattr in create_llm_pipeline
app.py CHANGED

@@ -192,6 +192,9 @@ def create_llm_pipeline(model_key):
     print(f"Creating pipeline for model: {model_key}")
     tokenizer, model, is_gguf = initialize_model_once(model_key)
 
+    # Get the model info for reference
+    model_info = MODEL_CONFIG[model_key]
+
     if model is None:
         raise ValueError(f"Model is None for {model_key}")
 
@@ -210,7 +213,7 @@ def create_llm_pipeline(model_key):
         return llm
 
     # Create appropriate pipeline for HF models
-    elif
+    elif model_info.get("is_t5", False):
         print("Creating T5 pipeline")
         pipe = pipeline(
             "text2text-generation",
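For context, the commit swaps a getattr-style attribute lookup for a plain dictionary lookup against MODEL_CONFIG. A minimal sketch of that pattern, assuming MODEL_CONFIG maps each model key to a dict with an optional is_t5 flag (the actual entries live elsewhere in app.py and are not shown in this diff; the keys and paths below are hypothetical):

# Hypothetical MODEL_CONFIG entries; the real ones are defined elsewhere in app.py.
MODEL_CONFIG = {
    "flan-t5-small": {"model_path": "google/flan-t5-small", "is_t5": True},
    "gguf-model": {"model_path": "placeholder/gguf-model", "is_t5": False},
}

model_key = "flan-t5-small"

# The new code reads the static config entry once per call...
model_info = MODEL_CONFIG[model_key]

# ...and branches on dict.get with a default, so entries that omit the
# "is_t5" flag simply fall through, much like getattr(obj, "is_t5", False)
# would for a missing attribute.
if model_info.get("is_t5", False):
    print("Creating T5 pipeline")

One advantage of branching on the config dict rather than on model attributes is that the decision no longer depends on how the underlying model object exposes (or hides) its type.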