Add bias to LoRA sidecar layer unit tests.
RyanJDick committed Sep 13, 2024
1 parent 457d561 commit 08e2b76
Showing 3 changed files with 6 additions and 4 deletions.
1 change: 0 additions & 1 deletion invokeai/backend/lora/layers/lora_layer.py
@@ -19,7 +19,6 @@ def __init__(
         self.up = up
         self.mid = mid
         self.down = down
-        self.bias = bias
 
     @classmethod
     def from_state_dict_values(
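The lora_layer.py change is a pure deletion: the constructor no longer re-assigns self.bias. A minimal, self-contained sketch of the pattern this implies, assuming (the diff does not show it) that the parent class already stores alpha and bias, which would make the deleted assignment redundant; the class and attribute names below are illustrative stand-ins, not the actual InvokeAI code:

import torch

class _LoRALayerBaseSketch:
    def __init__(self, alpha: float | None, bias: torch.Tensor | None):
        # Assumed behaviour: the base class is the single owner of alpha and bias.
        self.alpha = alpha
        self.bias = bias

class _LoRALayerSketch(_LoRALayerBaseSketch):
    def __init__(self, up, mid, down, alpha, bias):
        super().__init__(alpha=alpha, bias=bias)
        # After this commit there is no duplicate `self.bias = bias` here.
        self.up = up
        self.mid = mid
        self.down = down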
@@ -26,14 +26,16 @@ def test_concatenated_lora_linear_sidecar_layer():
     for out_features in sub_layer_out_features:
         down = torch.randn(rank, in_features)
         up = torch.randn(out_features, rank)
-        sub_layers.append(LoRALayer(up=up, mid=None, down=down, alpha=1.0, bias=None))
+        bias = torch.randn(out_features)
+        sub_layers.append(LoRALayer(up=up, mid=None, down=down, alpha=1.0, bias=bias))
     concatenated_lora_layer = ConcatenatedLoRALayer(sub_layers, concat_axis=0)
 
     # Patch the ConcatenatedLoRA layer into the linear layer.
     linear_patched = copy.deepcopy(linear)
     linear_patched.weight.data += (
         concatenated_lora_layer.get_weight(linear_patched.weight) * concatenated_lora_layer.scale()
     )
+    linear_patched.bias.data += concatenated_lora_layer.get_bias(linear_patched.bias) * concatenated_lora_layer.scale()
 
     # Create a ConcatenatedLoRALinearSidecarLayer.
     concatenated_lora_linear_sidecar_layer = ConcatenatedLoRALinearSidecarLayer(concatenated_lora_layer, weight=1.0)
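The rest of test_concatenated_lora_linear_sidecar_layer is truncated above. A self-contained sketch in plain torch (not the InvokeAI classes) of the concatenation behaviour the test relies on, with illustrative names and concat_axis=0 as in the test: stacking per-sub-layer weight and bias deltas along the output dimension yields one delta that can be baked into a single linear layer.

import torch

torch.manual_seed(0)
in_features, rank = 8, 4
sub_layer_out_features = [16, 32]

deltas, biases = [], []
for out_features in sub_layer_out_features:
    down = torch.randn(rank, in_features)
    up = torch.randn(out_features, rank)
    deltas.append(up @ down)                  # (out_features, in_features) weight delta
    biases.append(torch.randn(out_features))  # per-sub-layer bias delta

# Concatenate along the output dimension (concat_axis=0 in the test).
delta_w = torch.cat(deltas, dim=0)  # (48, 8)
delta_b = torch.cat(biases, dim=0)  # (48,)

# Applying the concatenated delta is equivalent to applying each sub-layer's
# delta to its own slice of the output.
x = torch.randn(2, in_features)
patched_out = x @ delta_w.T + delta_b
split_out = torch.cat([x @ d.T + b for d, b in zip(deltas, biases)], dim=1)
torch.testing.assert_close(patched_out, split_out)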
@@ -20,12 +20,13 @@ def test_lora_linear_sidecar_layer():
     rank = 4
     down = torch.randn(rank, in_features)
     up = torch.randn(out_features, rank)
-    lora_layer = LoRALayer(up=up, mid=None, down=down, alpha=1.0, bias=None)
+    bias = torch.randn(out_features)
+    lora_layer = LoRALayer(up=up, mid=None, down=down, alpha=1.0, bias=bias)
 
     # Patch the LoRA layer into the linear layer.
     linear_patched = copy.deepcopy(linear)
     linear_patched.weight.data += lora_layer.get_weight(linear_patched.weight) * lora_layer.scale()
-
+    linear_patched.bias.data += lora_layer.get_bias(linear_patched.bias) * lora_layer.scale()
     # Create a LoRALinearSidecarLayer.
     lora_linear_sidecar_layer = LoRALinearSidecarLayer(lora_layer, weight=1.0)
     linear_with_sidecar = LoRASidecarModule(linear, [lora_linear_sidecar_layer])
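The remainder of test_lora_linear_sidecar_layer is also truncated above. A self-contained sketch in plain torch (not the InvokeAI classes) of the equivalence both sidecar tests are built on, with illustrative names: baking scale * (up @ down) into the weight and scale * bias into the bias gives the same output as leaving the base linear untouched and adding the LoRA term alongside it.

import copy
import torch

torch.manual_seed(0)
in_features, out_features, rank = 8, 16, 4
scale = 1.0 / rank  # e.g. alpha / rank with alpha = 1.0 (assumed scaling)

linear = torch.nn.Linear(in_features, out_features)
down = torch.randn(rank, in_features)
up = torch.randn(out_features, rank)
bias = torch.randn(out_features)

# Patched version: bake the LoRA delta into the layer's parameters.
linear_patched = copy.deepcopy(linear)
linear_patched.weight.data += (up @ down) * scale
linear_patched.bias.data += bias * scale

# Sidecar version: keep the base layer untouched and add the LoRA term to its output.
x = torch.randn(2, in_features)
sidecar_out = linear(x) + (x @ (up @ down).T + bias) * scale
torch.testing.assert_close(linear_patched(x), sidecar_out)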
