From 09675934006cefb1eb3e58c41fca9ec372a7c797 Mon Sep 17 00:00:00 2001 From: Jonathan Yin Date: Wed, 11 Dec 2024 00:03:33 -0800 Subject: [PATCH] Fix NoneType attribute error when loading multiple Flux loras (#10182) Fix NoneType attribute error --- src/diffusers/loaders/lora_pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/diffusers/loaders/lora_pipeline.py b/src/diffusers/loaders/lora_pipeline.py index eb9b42c5fbb7..1445394b8784 100644 --- a/src/diffusers/loaders/lora_pipeline.py +++ b/src/diffusers/loaders/lora_pipeline.py @@ -2313,7 +2313,7 @@ def _maybe_expand_transformer_param_shape_or_error_( for name, module in transformer.named_modules(): if isinstance(module, torch.nn.Linear): module_weight = module.weight.data - module_bias = module.bias.data if hasattr(module, "bias") else None + module_bias = module.bias.data if module.bias is not None else None bias = module_bias is not None lora_A_weight_name = f"{name}.lora_A.weight"