From 1e01e0c9f41fe5d88a8d82063af0a9a8643d3599 Mon Sep 17 00:00:00 2001
From: paperspace
Date: Wed, 14 Feb 2024 15:09:19 +0000
Subject: [PATCH] Add comments

---
 experiments/laplace_lora/lora_transformer.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/experiments/laplace_lora/lora_transformer.py b/experiments/laplace_lora/lora_transformer.py
index e6f62903..db067d47 100644
--- a/experiments/laplace_lora/lora_transformer.py
+++ b/experiments/laplace_lora/lora_transformer.py
@@ -33,6 +33,7 @@ def __init__(self, config: FrozenConfigDict):
         WEIGHTS_TO_LORA = ["q_proj", "v_proj", "o_proj"]
 
         modules = list(model.model.layers.named_parameters())
+        # Get layer index, name for layers to adapt
         module_names_with_layer = [
             (name.split(".")[0], f'layer.{name.strip('.weight')}')
             for name, param in modules
@@ -44,7 +45,7 @@ def __init__(self, config: FrozenConfigDict):
             )
         ]
 
-        # only adapt last layer
+        # Subset of layers to adapt
         if self.target_modules == "last_layer":
             modules = [
                 [layer for name, layer in list(group)]