Update custom_pipeline.py

custom_pipeline.py (+2 -2)
@@ -48,7 +48,7 @@ class FluxWithCFGPipeline(FluxPipeline):
     with progressively increasing resolution for faster generation.
     """
     @torch.inference_mode()
-
+    def generate_images(
         self,
         prompt: Union[str, List[str]] = None,
         prompt_2: Optional[Union[str, List[str]]] = None,
@@ -138,7 +138,7 @@ class FluxWithCFGPipeline(FluxPipeline):
             timestep = t.expand(latents.shape[0]).to(latents.dtype)

             if generate_with_graph:
-                return
+                return generate_with_graph(latents, prompt_embeds, pooled_prompt_embeds, text_ids, latent_image_ids, timestep)
             else:
                 noise_pred = self.transformer(
                     hidden_states=latents,
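For context on the second hunk: judging by its name and call signature, `generate_with_graph` is a callable that replays a pre-captured CUDA graph of the transformer's forward pass, and the old bare `return` made `generate_images` hand `None` back to its caller. Below is a minimal sketch of how such a callable could be built with PyTorch's CUDA-graph API; the helper name `build_graph_callable`, the transformer keyword arguments beyond `hidden_states`, and the warm-up/capture details are assumptions for illustration, not code taken from this Space.

import torch

def build_graph_callable(transformer, latents, prompt_embeds, pooled_prompt_embeds,
                         text_ids, latent_image_ids, timestep):
    # Sketch only: capture one transformer forward pass into a CUDA graph and
    # return a replay function matching the call site in the diff above.
    # Keyword names mirror diffusers' FluxTransformer2DModel but are assumed here.
    static_latents = latents.clone()
    static_timestep = timestep.clone()

    # Warm up on a side stream so capture starts from a quiescent state.
    side = torch.cuda.Stream()
    side.wait_stream(torch.cuda.current_stream())
    with torch.cuda.stream(side):
        transformer(hidden_states=static_latents, timestep=static_timestep,
                    pooled_projections=pooled_prompt_embeds,
                    encoder_hidden_states=prompt_embeds,
                    txt_ids=text_ids, img_ids=latent_image_ids,
                    return_dict=False)
    torch.cuda.current_stream().wait_stream(side)

    graph = torch.cuda.CUDAGraph()
    with torch.cuda.graph(graph):
        static_out = transformer(hidden_states=static_latents, timestep=static_timestep,
                                 pooled_projections=pooled_prompt_embeds,
                                 encoder_hidden_states=prompt_embeds,
                                 txt_ids=text_ids, img_ids=latent_image_ids,
                                 return_dict=False)[0]

    def generate_with_graph(latents, prompt_embeds, pooled_prompt_embeds,
                            text_ids, latent_image_ids, timestep):
        # Copy the per-step inputs into the captured buffers and replay the graph.
        # The prompt tensors are treated as fixed from capture time, since they
        # do not change across denoising steps.
        static_latents.copy_(latents)
        static_timestep.copy_(timestep)
        graph.replay()
        return static_out

    return generate_with_graph

With something like this in place, the fixed line 141 forwards the per-step tensors to the replay callable and returns its noise prediction instead of `None`.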