Update app.py
app.py CHANGED
@@ -35,11 +35,19 @@ pipe.set_adapters(["better"], adapter_weights=[1.0])
 pipe.fuse_lora(adapter_name=["better"], lora_scale=1.0)
 pipe.unload_lora_weights()
 
-#
+# Correctly set memory format
 pipe.transformer.to(memory_format=torch.channels_last)
 pipe.vae.to(memory_format=torch.channels_last)
 
-
+# Conditionally enable xformers only for the transformer
+if hasattr(pipe, "transformer") and torch.cuda.is_available():
+    try:
+        pipe.transformer.enable_xformers_memory_efficient_attention()
+    except Exception as e:
+        print(
+            "Warning: Could not enable xformers for the transformer due to the following error:"
+        )
+        print(e)
 
 torch.cuda.empty_cache()
 
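For anyone reproducing this outside the Space, the following is a minimal sketch of the setup the hunk assumes: a diffusers pipeline with a LoRA adapter named "better" loaded, fused, and then unloaded. The checkpoint and LoRA repository ids are hypothetical placeholders, and the sketch uses the plural adapter_names keyword that diffusers documents for fuse_lora; only the adapter name "better" and the calls around it come from app.py.

import torch
from diffusers import DiffusionPipeline

# Hypothetical ids; only the adapter name "better" comes from app.py.
pipe = DiffusionPipeline.from_pretrained(
    "org/hypothetical-checkpoint", torch_dtype=torch.float16
).to("cuda")
pipe.load_lora_weights("org/hypothetical-lora", adapter_name="better")

# Mirrors the hunk's context line and its fuse/unload pair. Note that the
# documented keyword is the plural `adapter_names`, while app.py passes
# the singular `adapter_name`.
pipe.set_adapters(["better"], adapter_weights=[1.0])
pipe.fuse_lora(adapter_names=["better"], lora_scale=1.0)
pipe.unload_lora_weights()

With this setup in place, the guarded xformers call in the hunk is safe to run as-is: if xformers is missing or incompatible with the installed torch build, the try/except prints a warning and the pipeline keeps its default attention instead of crashing the Space.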