
Commit b7b4683

yiyixuxu and a co-author authored
allow Attend-and-Excite pipeline to work with different image sizes (#2476)
add attn_res variable

Co-authored-by: yiyixuxu <yixu310@gmail.com>
1 parent 56958e1 · commit b7b4683

File tree: 1 file changed (+4, -1)


src/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_attend_and_excite.py

Lines changed: 4 additions & 1 deletion
@@ -717,6 +717,7 @@ def __call__(
         max_iter_to_alter: int = 25,
         thresholds: dict = {0: 0.05, 10: 0.5, 20: 0.8},
         scale_factor: int = 20,
+        attn_res: int = 16,
     ):
         r"""
         Function invoked when calling the pipeline for generation.
@@ -788,6 +789,8 @@ def __call__(
                 Dictionary defining the iterations and desired thresholds to apply iterative latent refinement in.
             scale_factor (`int`, *optional*, default to 20):
                 Scale factor that controls the step size of each Attend and Excite update.
+            attn_res (`int`, *optional*, default to 16):
+                The resolution of most semantic attention map.
 
         Examples:
 
@@ -860,7 +863,7 @@ def __call__(
         # 6. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline
         extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)
 
-        self.attention_store = AttentionStore()
+        self.attention_store = AttentionStore(attn_res=attn_res)
         self.register_attention_control()
 
         # default config for step size from original repo

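For readers who want to try the new parameter, here is a minimal usage sketch. It assumes the standard diffusers API for this pipeline; the model id, prompt, token indices, and 768-pixel size are chosen purely for illustration, and the choice attn_res=24 is an assumption based on the default of 16 corresponding to the 16x16 cross-attention maps of a 512x512 generation (512 // 32 = 16).

import torch
from diffusers import StableDiffusionAttendAndExcitePipeline

# Load the Attend-and-Excite pipeline (fp16 on GPU for speed).
pipe = StableDiffusionAttendAndExcitePipeline.from_pretrained(
    "CompVis/stable-diffusion-v1-4", torch_dtype=torch.float16
).to("cuda")

prompt = "a cat and a frog"
image = pipe(
    prompt,
    token_indices=[2, 5],  # positions of "cat" and "frog" in the tokenized prompt
    height=768,
    width=768,
    attn_res=24,  # assumed scaling: 768 // 32; the default 16 matches 512x512
).images[0]
image.save("cat_and_frog.png")

Before this commit, AttentionStore() was always built with its default resolution, so prompts rendered at non-512 sizes could not line up the stored attention maps with the latent grid; passing attn_res through makes that resolution configurable.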
0 commit comments
