
Commit bfd24f2
refactor building accelerator args so it can easily be extended in downstream trainers
winglian committed Aug 22, 2024
1 parent 746154d commit bfd24f2
Showing 1 changed file with 4 additions and 1 deletion.
src/transformers/trainer.py: 5 changes (4 additions & 1 deletion)
@@ -4705,7 +4705,7 @@ def _add_sm_patterns_to_gitignore(self) -> None:
             self.repo.git_commit("Add *.sagemaker patterns to .gitignore.")
             self.repo.git_push()
 
-    def create_accelerator_and_postprocess(self):
+    def build_accelerator_args(self):
         grad_acc_kwargs = {}
         if is_accelerate_available("0.28.0") and self.args.accelerator_config.gradient_accumulation_kwargs is not None:
             grad_acc_kwargs = self.args.accelerator_config.gradient_accumulation_kwargs
@@ -4758,7 +4758,10 @@ def create_accelerator_and_postprocess(self):
             args["dataloader_config"] = dataloader_config
         else:
             args.update(accelerator_config)
+        return args
 
+    def create_accelerator_and_postprocess(self):
+        args = self.build_accelerator_args()
         # create accelerator object
         self.accelerator = Accelerator(**args)
         # some Trainer classes need to use `gather` instead of `gather_for_metrics`, thus we store a flag
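The practical effect of the split: a downstream Trainer subclass can now override build_accelerator_args to adjust the keyword arguments before the Accelerator object is constructed, instead of reimplementing create_accelerator_and_postprocess wholesale. A minimal sketch of such an override (the subclass name and the injected kwargs are hypothetical, not from this commit; log_with and project_dir are standard accelerate.Accelerator parameters used here for illustration):

from transformers import Trainer

class MyTrainer(Trainer):
    def build_accelerator_args(self):
        # start from the kwargs the base Trainer would pass to Accelerator
        args = super().build_accelerator_args()
        # extend them before Accelerator(**args) runs in
        # create_accelerator_and_postprocess
        args["log_with"] = "tensorboard"            # enable tracker logging
        args["project_dir"] = "runs/my_experiment"  # where tracker logs go
        return args

Because create_accelerator_and_postprocess consumes whatever build_accelerator_args returns, an override like this composes with the base Trainer's behavior rather than replacing it.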
