use micro batch size for eval batch size if not specified
Browse files
src/axolotl/utils/trainer.py
CHANGED
@@ -85,7 +85,7 @@ def setup_trainer(cfg, train_dataset, eval_dataset, model, tokenizer):
         training_args = transformers.TrainingArguments(
             per_device_train_batch_size=cfg.micro_batch_size,
-            per_device_eval_batch_size=cfg.eval_batch_size,
+            per_device_eval_batch_size=cfg.eval_batch_size if cfg.eval_batch_size is not None else cfg.micro_batch_size,
             gradient_accumulation_steps=cfg.gradient_accumulation_steps,
             eval_accumulation_steps=cfg.gradient_accumulation_steps,
             num_train_epochs=cfg.num_epochs,