|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 936,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 134.19052124023438,
      "learning_rate": 4.996794871794872e-05,
      "loss": 9.1177,
      "step": 1
    },
    {
      "epoch": 0.1,
      "grad_norm": 46.553192138671875,
      "learning_rate": 4.9006410256410256e-05,
      "loss": 2.6116,
      "step": 31
    },
    {
      "epoch": 0.2,
      "grad_norm": 17.68166732788086,
      "learning_rate": 4.8012820512820516e-05,
      "loss": 0.8879,
      "step": 62
    },
    {
      "epoch": 0.3,
      "grad_norm": 17.172128677368164,
      "learning_rate": 4.701923076923077e-05,
      "loss": 1.0567,
      "step": 93
    },
    {
      "epoch": 0.4,
      "grad_norm": 104.65974426269531,
      "learning_rate": 4.602564102564102e-05,
      "loss": 0.9872,
      "step": 124
    },
    {
      "epoch": 0.5,
      "grad_norm": 16.036008834838867,
      "learning_rate": 4.503205128205128e-05,
      "loss": 0.9717,
      "step": 155
    },
    {
      "epoch": 0.6,
      "grad_norm": 27.394895553588867,
      "learning_rate": 4.403846153846154e-05,
      "loss": 0.8679,
      "step": 186
    },
    {
      "epoch": 0.7,
      "grad_norm": 37.157169342041016,
      "learning_rate": 4.30448717948718e-05,
      "loss": 0.8257,
      "step": 217
    },
    {
      "epoch": 0.79,
      "grad_norm": 8.90817928314209,
      "learning_rate": 4.205128205128206e-05,
      "loss": 0.8604,
      "step": 248
    },
    {
      "epoch": 0.89,
      "grad_norm": 13.38546085357666,
      "learning_rate": 4.105769230769231e-05,
      "loss": 0.8587,
      "step": 279
    },
    {
      "epoch": 0.99,
      "grad_norm": 32.03287124633789,
      "learning_rate": 4.006410256410257e-05,
      "loss": 0.8512,
      "step": 310
    },
    {
      "epoch": 1.09,
      "grad_norm": 20.29245376586914,
      "learning_rate": 3.9070512820512824e-05,
      "loss": 0.6728,
      "step": 341
    },
    {
      "epoch": 1.19,
      "grad_norm": 10.910124778747559,
      "learning_rate": 3.807692307692308e-05,
      "loss": 0.611,
      "step": 372
    },
    {
      "epoch": 1.29,
      "grad_norm": 14.703102111816406,
      "learning_rate": 3.708333333333334e-05,
      "loss": 0.6468,
      "step": 403
    },
    {
      "epoch": 1.39,
      "grad_norm": 9.841747283935547,
      "learning_rate": 3.608974358974359e-05,
      "loss": 0.5327,
      "step": 434
    },
    {
      "epoch": 1.49,
      "grad_norm": 15.09598445892334,
      "learning_rate": 3.5096153846153845e-05,
      "loss": 0.5669,
      "step": 465
    },
    {
      "epoch": 1.59,
      "grad_norm": 18.652040481567383,
      "learning_rate": 3.4102564102564105e-05,
      "loss": 0.4854,
      "step": 496
    },
    {
      "epoch": 1.69,
      "grad_norm": 8.733405113220215,
      "learning_rate": 3.310897435897436e-05,
      "loss": 0.5627,
      "step": 527
    },
    {
      "epoch": 1.79,
      "grad_norm": 21.200965881347656,
      "learning_rate": 3.211538461538462e-05,
      "loss": 0.5001,
      "step": 558
    },
    {
      "epoch": 1.89,
      "grad_norm": 7.368778228759766,
      "learning_rate": 3.112179487179487e-05,
      "loss": 0.4897,
      "step": 589
    },
    {
      "epoch": 1.99,
      "grad_norm": 16.03817367553711,
      "learning_rate": 3.012820512820513e-05,
      "loss": 0.5294,
      "step": 620
    },
    {
      "epoch": 2.09,
      "grad_norm": 35.705631256103516,
      "learning_rate": 2.913461538461539e-05,
      "loss": 0.4197,
      "step": 651
    },
    {
      "epoch": 2.19,
      "grad_norm": 10.462698936462402,
      "learning_rate": 2.8141025641025643e-05,
      "loss": 0.3589,
      "step": 682
    },
    {
      "epoch": 2.29,
      "grad_norm": 7.011005878448486,
      "learning_rate": 2.7147435897435896e-05,
      "loss": 0.4396,
      "step": 713
    },
    {
      "epoch": 2.38,
      "grad_norm": 4.32899284362793,
      "learning_rate": 2.6153846153846157e-05,
      "loss": 0.4604,
      "step": 744
    },
    {
      "epoch": 2.48,
      "grad_norm": 5.010735988616943,
      "learning_rate": 2.516025641025641e-05,
      "loss": 0.3764,
      "step": 775
    },
    {
      "epoch": 2.58,
      "grad_norm": 6.4428582191467285,
      "learning_rate": 2.4166666666666667e-05,
      "loss": 0.3035,
      "step": 806
    },
    {
      "epoch": 2.68,
      "grad_norm": 11.917937278747559,
      "learning_rate": 2.3173076923076924e-05,
      "loss": 0.3287,
      "step": 837
    },
    {
      "epoch": 2.78,
      "grad_norm": 18.710346221923828,
      "learning_rate": 2.217948717948718e-05,
      "loss": 0.3209,
      "step": 868
    },
    {
      "epoch": 2.88,
      "grad_norm": 5.8296990394592285,
      "learning_rate": 2.1185897435897437e-05,
      "loss": 0.329,
      "step": 899
    },
    {
      "epoch": 2.98,
      "grad_norm": 11.532828330993652,
      "learning_rate": 2.0192307692307694e-05,
      "loss": 0.2934,
      "step": 930
    }
  ],
  "logging_steps": 31,
  "max_steps": 1560,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "total_flos": 1.1075205215224332e+19,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
|
|