don't need to set here
Browse files
src/axolotl/utils/trainer.py
CHANGED
@@ -81,9 +81,6 @@ def setup_trainer(cfg, train_dataset, eval_dataset, model, tokenizer):
     training_arguments_kwargs["fsdp"] = cfg.fsdp
     if cfg.fsdp_config:
         training_arguments_kwargs["fsdp_config"] = dict(cfg.fsdp_config)
-    # can't set optimizers directly on trainer when using fsdp, so set them here
-    if cfg.optimizer:
-        training_arguments_kwargs["optim"] = cfg.optimizer

     # deepspeed
     if (