add support for configurable target_modules (#53)

samos123 committed Oct 14, 2023
1 parent 9d0f649 · commit 4beaf23
Showing 2 changed files with 4 additions and 3 deletions.
model-trainer-huggingface/src/test_utils.py (2 changes: 1 addition & 1 deletion)

```diff
@@ -4,7 +4,7 @@
 
 
 def test_parse_training_args_int_float():
-    params = {"num_train_epochs": "1"}
+    params = {"num_train_epochs": "1", "target_modules": "q,v"}
     assert parse_training_args(params).num_train_epochs == 1.0
 
     params = {"num_train_epochs": "1", "max_steps": "5"}
```
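The updated test asserts two things at once: numeric strings such as "1" are still coerced to numbers, and an extra `target_modules` key no longer trips up `parse_training_args`, since that key is consumed by the LoRA setup rather than by `TrainingArguments`. A minimal sketch of such a parser, as a hypothetical illustration; the real implementation in model-trainer-huggingface/src may differ in names and behaviour:

```python
# Hypothetical sketch of parse_training_args; not the repository's actual code.
from transformers import TrainingArguments

# Assumed set of keys handled elsewhere (by the LoRA setup), not by
# TrainingArguments.
NON_TRAINING_KEYS = {"target_modules"}

def parse_training_args(params: dict) -> TrainingArguments:
    kwargs = {}
    for key, value in params.items():
        if key in NON_TRAINING_KEYS:
            continue  # e.g. "target_modules" is applied to the LoraConfig instead
        # Coerce numeric strings so "1" becomes 1 and "0.1" becomes 0.1.
        try:
            value = int(value)
        except ValueError:
            try:
                value = float(value)
            except ValueError:
                pass
        kwargs[key] = value
    # output_dir is required by TrainingArguments; the path here is illustrative.
    return TrainingArguments(output_dir="./output", **kwargs)

# Mirrors the updated test: "1" is coerced so 1 == 1.0, and "q,v" is skipped here.
assert parse_training_args(
    {"num_train_epochs": "1", "target_modules": "q,v"}
).num_train_epochs == 1.0
```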
model-trainer-huggingface/src/train.ipynb (5 changes: 3 additions & 2 deletions)

```diff
@@ -206,12 +206,13 @@
     "lora_config2 = LoraConfig(\n",
     "    r=16,\n",
     "    lora_alpha=32,\n",
     "    # target modules should be unset so it can detect target_modules automatically\n",
     "    # target_modules=[\"query_key_value\"],\n",
     "    lora_dropout=0.05,\n",
     "    bias=\"none\",\n",
     "    task_type=\"CAUSAL_LM\"\n",
     ")\n",
+    "target_modules = params.get(\"target_modules\")\n",
+    "if target_modules:\n",
+    "    lora_config2.target_modules = [mod.strip() for mod in target_modules.split(\",\")]\n",
     "\n",
     "model = prepare_model_for_kbit_training(model)\n",
     "\n",
```
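Unescaped from the notebook JSON, the changed cell works out to roughly the following Python. This is a sketch: `model` and `params` are assumed to be defined in earlier cells of the notebook.

```python
# Sketch of the notebook cell after this commit; `model` and `params`
# are assumed to come from earlier cells.
from peft import LoraConfig, prepare_model_for_kbit_training

lora_config2 = LoraConfig(
    r=16,
    lora_alpha=32,
    # target modules should be unset so it can detect target_modules automatically
    # target_modules=["query_key_value"],
    lora_dropout=0.05,
    bias="none",
    task_type="CAUSAL_LM",
)
# New in this commit: a comma-separated override, e.g. "q,v", takes
# precedence over automatic detection when present in params.
target_modules = params.get("target_modules")
if target_modules:
    lora_config2.target_modules = [mod.strip() for mod in target_modules.split(",")]

model = prepare_model_for_kbit_training(model)
```

With the `"target_modules": "q,v"` value used in the updated test, the config ends up with `target_modules == ["q", "v"]`; since each entry is stripped, whitespace around the commas ("q, v") works as well.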
