From 52e606d6dc33a9b5ec2d20f255f56cf472b0bbea Mon Sep 17 00:00:00 2001
From: Steven Munn
Date: Thu, 4 Apr 2024 09:23:49 -0700
Subject: [PATCH 1/4] Skip scaling if scale is identity

---
 src/diffusers/models/unets/unet_2d_condition.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/diffusers/models/unets/unet_2d_condition.py b/src/diffusers/models/unets/unet_2d_condition.py
index 34327e1049c5..e54bac040596 100644
--- a/src/diffusers/models/unets/unet_2d_condition.py
+++ b/src/diffusers/models/unets/unet_2d_condition.py
@@ -1183,7 +1183,7 @@ def forward(
         else:
             lora_scale = 1.0
 
-        if USE_PEFT_BACKEND:
+        if USE_PEFT_BACKEND and lora_scale != 1.0:
             # weight the lora layers by setting `lora_scale` for each PEFT layer
             scale_lora_layers(self, lora_scale)
 
@@ -1302,7 +1302,7 @@ def forward(
         sample = self.conv_act(sample)
         sample = self.conv_out(sample)
 
-        if USE_PEFT_BACKEND:
+        if USE_PEFT_BACKEND and lora_scale != 1.0:
             # remove `lora_scale` from each PEFT layer
             unscale_lora_layers(self, lora_scale)
 

From 800c7193bdb8792d9b32ca68e6bbaf14f3224a45 Mon Sep 17 00:00:00 2001
From: Steven Munn
Date: Fri, 5 Apr 2024 07:44:54 -0700
Subject: [PATCH 2/4] move check for weight one to scale and unscale lora

---
 src/diffusers/models/unets/unet_2d_condition.py | 4 ++--
 src/diffusers/utils/peft_utils.py               | 4 ++++
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/src/diffusers/models/unets/unet_2d_condition.py b/src/diffusers/models/unets/unet_2d_condition.py
index e54bac040596..34327e1049c5 100644
--- a/src/diffusers/models/unets/unet_2d_condition.py
+++ b/src/diffusers/models/unets/unet_2d_condition.py
@@ -1183,7 +1183,7 @@ def forward(
         else:
             lora_scale = 1.0
 
-        if USE_PEFT_BACKEND and lora_scale != 1.0:
+        if USE_PEFT_BACKEND:
             # weight the lora layers by setting `lora_scale` for each PEFT layer
             scale_lora_layers(self, lora_scale)
 
@@ -1302,7 +1302,7 @@ def forward(
         sample = self.conv_act(sample)
         sample = self.conv_out(sample)
 
-        if USE_PEFT_BACKEND and lora_scale != 1.0:
+        if USE_PEFT_BACKEND:
             # remove `lora_scale` from each PEFT layer
             unscale_lora_layers(self, lora_scale)
 
diff --git a/src/diffusers/utils/peft_utils.py b/src/diffusers/utils/peft_utils.py
index feececc56966..4545ca5e3db4 100644
--- a/src/diffusers/utils/peft_utils.py
+++ b/src/diffusers/utils/peft_utils.py
@@ -109,6 +109,8 @@ def scale_lora_layers(model, weight):
             The weight to be given to the LoRA layers.
     """
     from peft.tuners.tuners_utils import BaseTunerLayer
+    if weight == 1.0:
+        return
 
     for module in model.modules():
         if isinstance(module, BaseTunerLayer):
@@ -128,6 +130,8 @@ def unscale_lora_layers(model, weight: Optional[float] = None):
             value.
     """
     from peft.tuners.tuners_utils import BaseTunerLayer
+    if weight == 1.0:
+        return
 
     for module in model.modules():
         if isinstance(module, BaseTunerLayer):

From bba7838bcf54b668caa972d11aa7b94d284501f9 Mon Sep 17 00:00:00 2001
From: Steven Munn
Date: Fri, 5 Apr 2024 08:54:55 -0700
Subject: [PATCH 3/4] fix code style/quality

---
 src/diffusers/utils/peft_utils.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/src/diffusers/utils/peft_utils.py b/src/diffusers/utils/peft_utils.py
index 4545ca5e3db4..8ea12e2e3b3f 100644
--- a/src/diffusers/utils/peft_utils.py
+++ b/src/diffusers/utils/peft_utils.py
@@ -64,9 +64,11 @@ def recurse_remove_peft_layers(model):
         module_replaced = False
 
         if isinstance(module, LoraLayer) and isinstance(module, torch.nn.Linear):
-            new_module = torch.nn.Linear(module.in_features, module.out_features, bias=module.bias is not None).to(
-                module.weight.device
-            )
+            new_module = torch.nn.Linear(
+                module.in_features,
+                module.out_features,
+                bias=module.bias is not None,
+            ).to(module.weight.device)
             new_module.weight = module.weight
             if module.bias is not None:
                 new_module.bias = module.bias
@@ -109,6 +111,7 @@ def scale_lora_layers(model, weight):
             The weight to be given to the LoRA layers.
     """
     from peft.tuners.tuners_utils import BaseTunerLayer
+
     if weight == 1.0:
         return
 
@@ -130,6 +133,7 @@ def unscale_lora_layers(model, weight: Optional[float] = None):
             value.
     """
     from peft.tuners.tuners_utils import BaseTunerLayer
+
     if weight == 1.0:
         return
 

From 067eb8ee5490fdf6d3a6b108f8fad6c63e4cca60 Mon Sep 17 00:00:00 2001
From: Steven Munn <5297082+stevenjlm@users.noreply.github.com>
Date: Fri, 5 Apr 2024 19:45:09 -0700
Subject: [PATCH 4/4] Empty-Commit