Commit 6d0e026

uP
1 parent: a2dc95c

2 files changed: 2 additions, 2 deletions

src/diffusers/loaders.py

Lines changed: 1 addition & 1 deletion
@@ -848,7 +848,7 @@ def _modify_text_encoder(self, attn_processors: Dict[str, LoRAAttnProcessor]):
         """
         # Loop over the original attention modules.
         for name, _ in self.text_encoder.named_modules():
-            if any([x in name for x in TEXT_ENCODER_TARGET_MODULES]):
+            if any(x in name for x in TEXT_ENCODER_TARGET_MODULES):
                 # Retrieve the module and its corresponding LoRA processor.
                 module = self.text_encoder.get_submodule(name)
                 # Construct a new function that performs the LoRA merging. We will monkey patch

tests/models/test_lora_layers.py

Lines changed: 1 addition & 1 deletion
@@ -46,7 +46,7 @@ def create_unet_lora_layers(unet: nn.Module):
 def create_text_encoder_lora_layers(text_encoder: nn.Module):
     text_lora_attn_procs = {}
     for name, module in text_encoder.named_modules():
-        if any([x in name for x in TEXT_ENCODER_TARGET_MODULES]):
+        if any(x in name for x in TEXT_ENCODER_TARGET_MODULES):
             text_lora_attn_procs[name] = LoRAAttnProcessor(hidden_size=module.out_features, cross_attention_dim=None)
     text_encoder_lora_layers = AttnProcsLayers(text_lora_attn_procs)
     return text_encoder_lora_layers
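
Both hunks make the same micro-fix: passing a generator expression to any() instead of a list comprehension. With a generator, any() can short-circuit on the first match, and no intermediate list of booleans is allocated. A minimal sketch of the difference, using an illustrative stand-in for the TEXT_ENCODER_TARGET_MODULES constant (not necessarily the library's actual value):

    # Illustrative stand-in for diffusers' TEXT_ENCODER_TARGET_MODULES.
    TEXT_ENCODER_TARGET_MODULES = ["q_proj", "k_proj", "v_proj", "out_proj"]

    name = "text_model.encoder.layers.0.self_attn.q_proj"

    # Before: the list comprehension evaluates every membership test and
    # builds a full list before any() sees a single element.
    matched = any([x in name for x in TEXT_ENCODER_TARGET_MODULES])

    # After: the generator is consumed lazily, so any() returns True at
    # the first match without allocating a list.
    matched = any(x in name for x in TEXT_ENCODER_TARGET_MODULES)

For a four-element target list the saving is negligible, but the generator form is the idiomatic spelling, and the change is behavior-preserving.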
