Commit 4c18ddb
auto_find_batch_size isn't yet supported with DeepSpeed/FSDP. Raise error accordingly. (huggingface#29058)

Update trainer.py
pacman100 committed Feb 16, 2024
1 parent b262808 commit 4c18ddb
Showing 1 changed file with 5 additions and 0 deletions.
src/transformers/trainer.py (5 additions, 0 deletions)

@@ -4136,6 +4136,11 @@ def create_accelerator_and_postprocess(self):
             wrapper = "DeepSpeed" if self.is_deepspeed_enabled else "FSDP"
             raise ValueError(f"{wrapper} can't be used with `save_only_model` along with `load_best_model_at_end`.")
 
+        # `auto_find_batch_size` isn't yet supported with DeepSpeed/FSDP
+        if (self.is_deepspeed_enabled or self.is_fsdp_enabled) and self.args.auto_find_batch_size:
+            wrapper = "DeepSpeed" if self.is_deepspeed_enabled else "FSDP"
+            raise NotImplementedError(f"`{wrapper}` doesn't support `auto_find_batch_size`.")
+
     def propagate_args_to_deepspeed(self, auto_find_batch_size=False):
         """
         Sets values in the deepspeed plugin based on the Trainer args
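
For context, a minimal sketch (not part of the commit) of how the new guard surfaces to a user. The model name and output_dir are placeholders, and the script is assumed to run under a distributed launcher (e.g. `torchrun --nproc_per_node=2 sketch.py` or `accelerate launch`) so that FSDP is actually enabled; in a plain single process the FSDP plugin may never be created and the check would not fire.

# sketch.py -- illustrative only; names marked as placeholders are assumptions
from transformers import AutoModelForSequenceClassification, Trainer, TrainingArguments

model = AutoModelForSequenceClassification.from_pretrained("bert-base-uncased")  # placeholder model

args = TrainingArguments(
    output_dir="out",               # placeholder
    fsdp="full_shard",              # turn on FSDP sharding
    auto_find_batch_size=True,      # ask Trainer to shrink the batch size on OOM
)

try:
    # Trainer.__init__ calls create_accelerator_and_postprocess, where the
    # new check lives, so the error is raised before training even starts.
    trainer = Trainer(model=model, args=args)
except NotImplementedError as err:
    print(err)  # -> `FSDP` doesn't support `auto_find_batch_size`.

Because the check sits in `create_accelerator_and_postprocess`, which runs during `Trainer` construction, the unsupported combination fails fast at init time rather than partway through a training run.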
