diff --git a/src/diffusers/models/transformer_2d.py b/src/diffusers/models/transformer_2d.py
index 4819d3be48e1..c96aef65f339 100644
--- a/src/diffusers/models/transformer_2d.py
+++ b/src/diffusers/models/transformer_2d.py
@@ -284,7 +284,7 @@ def forward(
 
             hidden_states = self.norm(hidden_states)
             if not self.use_linear_projection:
-                hidden_states = self.proj_in(hidden_states, lora_scale)
+                hidden_states = self.proj_in(hidden_states, scale=lora_scale)
                 inner_dim = hidden_states.shape[1]
                 hidden_states = hidden_states.permute(0, 2, 3, 1).reshape(batch, height * width, inner_dim)
             else:
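For context, here is a minimal sketch of the call being changed. In this era of diffusers, `proj_in` can be a LoRA-aware conv layer whose `forward` accepts an optional `scale` argument; the class below is a simplified stand-in for `LoRACompatibleConv` (from `diffusers.models.lora`), not the library implementation, with the LoRA path stubbed to a single low-rank adapter.

```python
import torch
import torch.nn as nn


class LoRACompatibleConv(nn.Conv2d):
    """Simplified stand-in for a LoRA-aware conv layer.

    The real layer adds a scaled low-rank (LoRA) delta on top of the base
    convolution; the adapter here is optional and kept deliberately minimal.
    """

    def __init__(self, *args, lora_layer=None, **kwargs):
        super().__init__(*args, **kwargs)
        self.lora_layer = lora_layer  # optional low-rank adapter module

    def forward(self, hidden_states, scale: float = 1.0):
        out = super().forward(hidden_states)
        if self.lora_layer is not None:
            # The LoRA contribution is weighted by `scale`.
            out = out + scale * self.lora_layer(hidden_states)
        return out


proj_in = LoRACompatibleConv(320, 320, kernel_size=1)
x = torch.randn(1, 320, 64, 64)
lora_scale = 0.5

# The keyword form from the patch: `scale` binds to the intended parameter
# by name rather than by position.
y = proj_in(x, scale=lora_scale)
```

Passing `scale` by keyword documents intent at the call site and keeps the call correct even if the layer's `forward` ever gains another positional parameter between `hidden_states` and `scale`.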