Commit: remove fsdp and deepspeed patches
winglian committed Dec 13, 2024
1 parent 8c940e7 commit c466872
Showing 1 changed file with 12 additions and 12 deletions.
src/axolotl/utils/models.py: 12 additions & 12 deletions
@@ -380,18 +380,18 @@ def apply_patches(self) -> None:
         plugin_manager = PluginManager.get_instance()
         plugin_manager.pre_model_load(self.cfg)
 
-        if self.cfg.fsdp:
-            from axolotl.monkeypatch.trainer_fsdp_optim import (
-                patch_training_loop_for_fsdp,
-            )
-
-            patch_training_loop_for_fsdp()
-        elif self.cfg.deepspeed and self.cfg.gradient_accumulation_steps > 1:
-            from axolotl.monkeypatch.trainer_grad_accum import (
-                patch_training_loop_for_deepspeed_0_16_x,
-            )
-
-            patch_training_loop_for_deepspeed_0_16_x()
+        # if self.cfg.fsdp:
+        #     from axolotl.monkeypatch.trainer_fsdp_optim import (
+        #         patch_training_loop_for_fsdp,
+        #     )
+        #
+        #     patch_training_loop_for_fsdp()
+        # elif self.cfg.deepspeed and self.cfg.gradient_accumulation_steps > 1:
+        #     from axolotl.monkeypatch.trainer_grad_accum import (
+        #         patch_training_loop_for_deepspeed_0_16_x,
+        #     )
+        #
+        #     patch_training_loop_for_deepspeed_0_16_x()
 
         if self.cfg.gradient_checkpointing == "unsloth":
             transformers.modeling_utils.checkpoint = hf_grad_checkpoint_unsloth_wrapper
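For context, helpers like patch_training_loop_for_fsdp are monkeypatches applied before model load to work around trainer bugs for specific setups (FSDP, or DeepSpeed 0.16.x with gradient accumulation). A minimal sketch of the general pattern, assuming the patch wraps transformers' Trainer._inner_training_loop; the wrapper name and body below are hypothetical illustrations, not axolotl's actual implementation:

    # Hypothetical sketch (not axolotl's actual code): how a helper like
    # patch_training_loop_for_fsdp can swap in a fixed training loop.
    from transformers import Trainer

    # Keep a reference to the stock implementation so the patch can delegate.
    _original_inner_training_loop = Trainer._inner_training_loop

    def _patched_inner_training_loop(self, *args, **kwargs):
        # An FSDP- or DeepSpeed-specific fix would go here, before
        # delegating back to the original loop.
        return _original_inner_training_loop(self, *args, **kwargs)

    def patch_training_loop_for_fsdp():
        # Rebinding the method on the class affects every Trainer instance
        # created afterward, which is why the patch runs before model load.
        Trainer._inner_training_loop = _patched_inner_training_loop

With the block commented out, no patch is applied and the stock transformers training loop is used in both cases.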
