From 41c4b5a42cfe61fc4c6d1d5e73377f84ba2a1377 Mon Sep 17 00:00:00 2001
From: Jacob Morrison
Date: Sun, 31 Aug 2025 12:13:19 -0700
Subject: [PATCH] Update finetune.py

---
 open_instruct/finetune.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/open_instruct/finetune.py b/open_instruct/finetune.py
index 7e9d272de..460c2667b 100644
--- a/open_instruct/finetune.py
+++ b/open_instruct/finetune.py
@@ -956,7 +956,7 @@ def main(args: FlatArguments, tc: TokenizerConfig):
                             os.path.join(get_last_checkpoint_path(args, incomplete=True), "COMPLETED"), "w"
                         ) as f:
                             f.write("COMPLETED")  # annoyingly, empty files arent uploaded by beaker.
-                        if accelerator.is_local_main_process:  # TODO: in mason local model this is gonna error out if using something like output/test; because mason used the same shared file ssytem.
+                        if accelerator.is_main_process:  # TODO: in mason local model this is gonna error out if using something like output/test; because mason used the same shared file ssytem.
                             clean_last_n_checkpoints(args.output_dir, args.keep_last_n_checkpoints)
                         accelerator.wait_for_everyone()

@@ -971,7 +971,7 @@ def main(args: FlatArguments, tc: TokenizerConfig):
             # use this to mark the checkpoint as completely saved, to avoid restoring from garbled checkpoints
             with open(os.path.join(get_last_checkpoint_path(args, incomplete=True), "COMPLETED"), "w") as f:
                 f.write("COMPLETED")  # annoyingly, empty files arent uploaded by beaker.
-            if accelerator.is_local_main_process:
+            if accelerator.is_main_process:
                 clean_last_n_checkpoints(args.output_dir, args.keep_last_n_checkpoints)
             accelerator.wait_for_everyone()

@@ -981,7 +981,7 @@ def main(args: FlatArguments, tc: TokenizerConfig):
         )

     # remove all checkpoints to save space
-    if args.clean_checkpoints_at_end and accelerator.is_local_main_process:
+    if args.clean_checkpoints_at_end and accelerator.is_main_process:
         clean_last_n_checkpoints(args.output_dir, keep_last_n_checkpoints=0)

     if (
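
The patch swaps accelerator.is_local_main_process (true for one process on each node) for accelerator.is_main_process (true for exactly one process in the whole job) around checkpoint cleanup, so that when args.output_dir lives on a filesystem shared by all nodes only a single process prunes it. The sketch below illustrates that distinction using the real Hugging Face Accelerate API; shared_output_dir and prune_old_checkpoints are hypothetical stand-ins for the repo's args.output_dir and clean_last_n_checkpoints, not code from the patch.

import os
import shutil

from accelerate import Accelerator

accelerator = Accelerator()
shared_output_dir = "output/test"  # hypothetical path; the same directory is visible from every node


def prune_old_checkpoints(output_dir: str, keep_last_n: int) -> None:
    # Hypothetical stand-in for clean_last_n_checkpoints: delete all but the
    # newest keep_last_n step_* checkpoint directories under output_dir.
    checkpoints = sorted(
        (d for d in os.listdir(output_dir) if d.startswith("step_")),
        key=lambda name: int(name.split("_")[-1]),
    )
    for stale in checkpoints[: max(len(checkpoints) - keep_last_n, 0)]:
        shutil.rmtree(os.path.join(output_dir, stale))


# After the patch: the cleanup runs on exactly one process in the whole job,
# which is what a shared filesystem needs, since every node sees the same files.
if accelerator.is_main_process:
    prune_old_checkpoints(shared_output_dir, keep_last_n=3)

# Before the patch the guard was accelerator.is_local_main_process, which is
# true for one process on *each* node: fine for node-local disks, but on a
# shared filesystem the per-node cleanups would be redundant and could race.
accelerator.wait_for_everyone()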