Commit
Added expts
itsmemala committed Mar 18, 2024
1 parent ec18833 commit d029a67

Showing 5 changed files with 415 additions and 30 deletions.
430 changes: 402 additions & 28 deletions CL_Metrics.ipynb

Large diffs are not rendered by default.

4 changes: 4 additions & 0 deletions Collas24 BehaviourSH-MAS.txt
@@ -36,6 +36,10 @@
## ANCL MAS (Avg)
!python FABR//run.py --bert_model 'bert-base-uncased' --experiment annomi --approach bert_adapter_ewc_ancl --imp function --baseline ewc_ancl --backbone bert_adapter --note random0 --idrandom 0 --seed 0 --scenario dil --use_cls_wgts True --train_batch_size 32 --num_train_epochs 50 --valid_loss_es 0.002 --lr_patience 5 --learning_rate 0.00003 --lamb 0.1 --alpha_lamb 0.05 --ancl True --save_wd_old_magn True --my_save_path /content/gdrive/MyDrive/Collas24/BehavSH/BehavSH_ANCLMAS.1.1/

## CHSF ANCL MAS ##
!python FABR//run.py --bert_model 'bert-base-uncased' --experiment annomi --approach bert_adapter_ewc --imp function --baseline ewc --backbone bert_adapter --note random0 --idrandom 0 --seed 0 --scenario dil --use_cls_wgts True --train_batch_size 32 --num_train_epochs 50 --valid_loss_es 0.002 --lr_patience 5 --learning_rate 0.00003 --custom_lamb 0,0 --break_after_task 1 --my_save_path /content/gdrive/MyDrive/Collas24/BehavSH/BehavSH_MAS_t1gold/
!python FABR//run.py --bert_model 'bert-base-uncased' --experiment annomi --approach bert_adapter_ewc_ancl --imp function --baseline ewc_ancl --backbone bert_adapter --note random0 --idrandom 0 --seed 0 --scenario dil --use_cls_wgts True --train_batch_size 32 --num_train_epochs 50 --valid_loss_es 0.002 --lr_patience 5 --learning_rate 0.00003 --custom_lamb 0,2 --custom_alpha_lamb 0,0.01 --ancl True --break_after_task 1 --my_save_path /content/gdrive/MyDrive/Collas24/BehavSH/BehavSH_ANCLMAS_t1.1/

## Adapt-KT (Fisher Avg)
!python FABR//run.py --bert_model 'bert-base-uncased' --experiment annomi --approach bert_adapter_ewc_freeze --imp function --backbone bert_adapter --baseline ewc_freeze --note random0 --idrandom 0 --seed 0 --scenario dil --use_cls_wgts True --train_batch_size 32 --num_train_epochs 50 --valid_loss_es 0.002 --lr_patience 5 --learning_rate 0.00003 --lamb 5 --adapt_type kt --save_wd_old_magn True --my_save_path /content/gdrive/MyDrive/Collas24/BehavSH/BehavSH_NoL1LAMAS_AdaptKT.9/
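Note on the CHSF runs above: --custom_lamb and --custom_alpha_lamb take comma-separated per-task schedules (e.g. --custom_lamb 0,2 means task 0 is unregularised and task 1 uses lambda = 2). A minimal sketch of how such flags could be parsed in run.py; the parser below is an illustrative assumption, not the repository's actual code:

# Sketch (assumed): turn "0,2" into a per-task list of floats.
import argparse

def per_task_floats(s):
    # One regularisation strength per task, in task order.
    return [float(x) for x in s.split(',')]

parser = argparse.ArgumentParser()
parser.add_argument('--custom_lamb', type=per_task_floats, default=None)
parser.add_argument('--custom_alpha_lamb', type=per_task_floats, default=None)
parser.add_argument('--break_after_task', type=int, default=None)

args = parser.parse_args(['--custom_lamb', '0,2',
                          '--custom_alpha_lamb', '0,0.01',
                          '--break_after_task', '1'])
assert args.custom_lamb == [0.0, 2.0]
assert args.custom_alpha_lamb == [0.0, 0.01]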
6 changes: 5 additions & 1 deletion Collas24 IntentSH-MAS.txt
@@ -53,4 +53,8 @@
!python FABR//run.py --bert_model 'bert-base-uncased' --experiment hwu64 --approach bert_adapter_ewc --imp function --backbone bert_adapter --baseline ewc --note random0 --idrandom 0 --seed 0 --scenario cil --use_rbs True --train_batch_size 32 --num_train_epochs 50 --valid_loss_es 0.002 --lr_patience 5 --learning_rate 0.003 --lamb 100 --save_wd_old_magn True --my_save_path /content/gdrive/MyDrive/Collas24/IntentSH_MAS_wlast.3/

## ANCL MAS (Fisher Avg) ##
!python FABR//run.py --bert_model 'bert-base-uncased' --experiment hwu64 --approach bert_adapter_ewc_ancl --imp function --backbone bert_adapter --baseline ewc_ancl --note random0 --idrandom 0 --seed 0 --scenario cil --use_rbs True --train_batch_size 32 --num_train_epochs 50 --valid_loss_es 0.002 --lr_patience 5 --learning_rate 0.003 --lamb 100 --alpha_lamb 50 --ancl True --save_wd_old_magn True --my_save_path /content/gdrive/MyDrive/Collas24/IntentSH_ANCLMAS_wlast.3.1/

## CHSF ANCL MAS ##
!python FABR//run.py --bert_model 'bert-base-uncased' --experiment hwu64 --approach bert_adapter_ewc --imp function --backbone bert_adapter --baseline ewc --note random0 --idrandom 0 --seed 0 --scenario cil --use_rbs True --train_batch_size 32 --num_train_epochs 50 --valid_loss_es 0.002 --lr_patience 5 --learning_rate 0.003 --custom_lamb 0,0 --break_after_task 1 --my_save_path /content/gdrive/MyDrive/Collas24/IntentSH_MAS_wlast_t1gold/
!python FABR//run.py --bert_model 'bert-base-uncased' --experiment hwu64 --approach bert_adapter_ewc_ancl --imp function --backbone bert_adapter --baseline ewc_ancl --note random0 --idrandom 0 --seed 0 --scenario cil --use_rbs True --train_batch_size 32 --num_train_epochs 50 --valid_loss_es 0.002 --lr_patience 5 --learning_rate 0.003 --custom_lamb 0,0.5 --custom_alpha_lamb 0,0.01 --ancl True --break_after_task 1 --my_save_path /content/gdrive/MyDrive/Collas24/IntentSH_ANCLMAS_wlast_t1.1.1/
4 changes: 4 additions & 0 deletions approaches/bert_adapter_ewc_ancl.py
@@ -211,6 +211,10 @@ def train(self,t,train,valid,args,num_train_steps,save_path,train_data,valid_dat
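# Per-parameter lambda: 1/(learning_rate * Fisher importance), scaled by
# lamb_div and clipped to the dtype's finite range so near-zero Fisher
# values cannot produce inf.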
for n in self.fisher.keys():
self.lamb[n] = (1/(self.args.learning_rate*self.fisher[n]))/self.args.lamb_div
self.lamb[n] = torch.clip(self.lamb[n],min=torch.finfo(self.lamb[n].dtype).min,max=torch.finfo(self.lamb[n].dtype).max)
elif phase=='mcl' and self.args.custom_lamb is not None:
# Set EWC lambda and ANCL alpha lambda for the subsequent task
self.lamb = self.args.custom_lamb[t+1] if t+1<=self.args.break_after_task else 0
self.alpha_lamb = self.args.custom_alpha_lamb[t+1] if t+1<=self.args.break_after_task else 0

if phase=='fo':
fo_model=utils.get_model(self.model)
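The added branch above picks the next task's regularisation strengths from those per-task schedules and switches them off past break_after_task. The same selection logic as a standalone sketch (a rewrite for illustration; the function name is hypothetical):

def next_task_strengths(t, custom_lamb, custom_alpha_lamb, break_after_task):
    # After finishing task t, choose strengths for task t+1; beyond
    # break_after_task, both penalties are disabled (set to 0).
    lamb = custom_lamb[t + 1] if t + 1 <= break_after_task else 0
    alpha_lamb = custom_alpha_lamb[t + 1] if t + 1 <= break_after_task else 0
    return lamb, alpha_lamb

# With --custom_lamb 0,2 --custom_alpha_lamb 0,0.01 --break_after_task 1:
assert next_task_strengths(0, [0.0, 2.0], [0.0, 0.01], 1) == (2.0, 0.01)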
1 change: 0 additions & 1 deletion approaches/bert_adapter_ewc_freeze.py
@@ -288,7 +288,6 @@ def train(self,t,train,valid,args,num_train_steps,save_path,train_data,valid_dat
elif phase=='mcl' and self.args.custom_lamb is not None:
# Set EWC lambda for subsequent task
self.lamb = self.args.custom_lamb[t+1] if t+1<=self.args.break_after_task else 0
self.alpha_lamb = self.args.custom_alpha_lamb[t+1] if t+1<=self.args.break_after_task else 0

if phase=='fo':
fo_model=utils.get_model(self.model)
