Skip to content

Commit 5b51ad0

Browse files
authored
[LoRA] fix vanilla fine-tuned lora loading. (#8691)
fix vanilla fine-tuned lora loading.
1 parent 10b4e35 commit 5b51ad0

File tree

1 file changed

+1
-2
lines changed

1 file changed

+1
-2
lines changed

src/diffusers/loaders/lora.py

+1-2
Original file line number | Diff line number | Diff line change
@@ -396,8 +396,7 @@ def load_lora_into_unet(cls, state_dict, network_alphas, unet, adapter_name=None
396396
# their prefixes.
397397
keys = list(state_dict.keys())
398398
only_text_encoder = all(key.startswith(cls.text_encoder_name) for key in keys)
399-
400-
if any(key.startswith(cls.unet_name) for key in keys) and not only_text_encoder:
399+
if not only_text_encoder:
401400
# Load the layers corresponding to UNet.
402401
logger.info(f"Loading {cls.unet_name}.")
403402
unet.load_attn_procs(

0 commit comments

Comments (0)