{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 3120,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0016025641025641025,
      "grad_norm": 213.62364196777344,
      "learning_rate": 4.998397435897436e-05,
      "loss": 9.3378,
      "step": 1
    },
    {
      "epoch": 0.049679487179487176,
      "grad_norm": 43.086360931396484,
      "learning_rate": 4.950320512820513e-05,
      "loss": 2.5252,
      "step": 31
    },
    {
      "epoch": 0.09935897435897435,
      "grad_norm": 48.219451904296875,
      "learning_rate": 4.9006410256410256e-05,
      "loss": 1.5213,
      "step": 62
    },
    {
      "epoch": 0.14903846153846154,
      "grad_norm": 30.23198127746582,
      "learning_rate": 4.850961538461539e-05,
      "loss": 1.9636,
      "step": 93
    },
    {
      "epoch": 0.1987179487179487,
      "grad_norm": 52.96772766113281,
      "learning_rate": 4.8012820512820516e-05,
      "loss": 1.3545,
      "step": 124
    },
    {
      "epoch": 0.2483974358974359,
      "grad_norm": 48.79069900512695,
      "learning_rate": 4.751602564102564e-05,
      "loss": 1.4998,
      "step": 155
    },
    {
      "epoch": 0.2980769230769231,
      "grad_norm": 38.8032341003418,
      "learning_rate": 4.701923076923077e-05,
      "loss": 1.4654,
      "step": 186
    },
    {
      "epoch": 0.34775641025641024,
      "grad_norm": 106.62429809570312,
      "learning_rate": 4.65224358974359e-05,
      "loss": 1.8027,
      "step": 217
    },
    {
      "epoch": 0.3974358974358974,
      "grad_norm": 25.370744705200195,
      "learning_rate": 4.602564102564102e-05,
      "loss": 1.4394,
      "step": 248
    },
    {
      "epoch": 0.44711538461538464,
      "grad_norm": 82.52824401855469,
      "learning_rate": 4.5528846153846157e-05,
      "loss": 1.4573,
      "step": 279
    },
    {
      "epoch": 0.4967948717948718,
      "grad_norm": 19.148210525512695,
      "learning_rate": 4.503205128205128e-05,
      "loss": 1.2547,
      "step": 310
    },
    {
      "epoch": 0.5464743589743589,
      "grad_norm": 46.984779357910156,
      "learning_rate": 4.453525641025642e-05,
      "loss": 1.3297,
      "step": 341
    },
    {
      "epoch": 0.5961538461538461,
      "grad_norm": 45.50709915161133,
      "learning_rate": 4.403846153846154e-05,
      "loss": 1.3359,
      "step": 372
    },
    {
      "epoch": 0.6458333333333334,
      "grad_norm": 36.45267105102539,
      "learning_rate": 4.354166666666667e-05,
      "loss": 1.2466,
      "step": 403
    },
    {
      "epoch": 0.6955128205128205,
      "grad_norm": 20.8362979888916,
      "learning_rate": 4.30448717948718e-05,
      "loss": 1.1206,
      "step": 434
    },
    {
      "epoch": 0.7451923076923077,
      "grad_norm": 64.35557556152344,
      "learning_rate": 4.2548076923076924e-05,
      "loss": 1.3676,
      "step": 465
    },
    {
      "epoch": 0.7948717948717948,
      "grad_norm": 12.930615425109863,
      "learning_rate": 4.205128205128206e-05,
      "loss": 1.3871,
      "step": 496
    },
    {
      "epoch": 0.844551282051282,
      "grad_norm": 51.6870002746582,
      "learning_rate": 4.1554487179487184e-05,
      "loss": 1.0038,
      "step": 527
    },
    {
      "epoch": 0.8942307692307693,
      "grad_norm": 14.677131652832031,
      "learning_rate": 4.105769230769231e-05,
      "loss": 1.2283,
      "step": 558
    },
    {
      "epoch": 0.9439102564102564,
      "grad_norm": 43.24152755737305,
      "learning_rate": 4.056089743589744e-05,
      "loss": 1.0872,
      "step": 589
    },
    {
      "epoch": 0.9935897435897436,
      "grad_norm": 25.641794204711914,
      "learning_rate": 4.006410256410257e-05,
      "loss": 0.9991,
      "step": 620
    },
    {
      "epoch": 1.0432692307692308,
      "grad_norm": 36.622703552246094,
      "learning_rate": 3.956730769230769e-05,
      "loss": 0.9941,
      "step": 651
    },
    {
      "epoch": 1.092948717948718,
      "grad_norm": 17.249757766723633,
      "learning_rate": 3.9070512820512824e-05,
      "loss": 0.9877,
      "step": 682
    },
    {
      "epoch": 1.142628205128205,
      "grad_norm": 31.591552734375,
      "learning_rate": 3.857371794871795e-05,
      "loss": 0.8917,
      "step": 713
    },
    {
      "epoch": 1.1923076923076923,
      "grad_norm": 28.23312759399414,
      "learning_rate": 3.807692307692308e-05,
      "loss": 0.9264,
      "step": 744
    },
    {
      "epoch": 1.2419871794871795,
      "grad_norm": 37.707183837890625,
      "learning_rate": 3.7580128205128204e-05,
      "loss": 0.849,
      "step": 775
    },
    {
      "epoch": 1.2916666666666667,
      "grad_norm": 12.67361831665039,
      "learning_rate": 3.708333333333334e-05,
      "loss": 0.8255,
      "step": 806
    },
    {
      "epoch": 1.3413461538461537,
      "grad_norm": 39.1461181640625,
      "learning_rate": 3.658653846153846e-05,
      "loss": 0.7372,
      "step": 837
    },
    {
      "epoch": 1.391025641025641,
      "grad_norm": 33.27256393432617,
      "learning_rate": 3.608974358974359e-05,
      "loss": 0.8227,
      "step": 868
    },
    {
      "epoch": 1.4407051282051282,
      "grad_norm": 545.9265747070312,
      "learning_rate": 3.559294871794872e-05,
      "loss": 0.7941,
      "step": 899
    },
    {
      "epoch": 1.4903846153846154,
      "grad_norm": 23.55372428894043,
      "learning_rate": 3.5096153846153845e-05,
      "loss": 0.8686,
      "step": 930
    },
    {
      "epoch": 1.5400641025641026,
      "grad_norm": 22.64493179321289,
      "learning_rate": 3.459935897435898e-05,
      "loss": 0.7059,
      "step": 961
    },
    {
      "epoch": 1.5897435897435899,
      "grad_norm": 79.4326171875,
      "learning_rate": 3.4102564102564105e-05,
      "loss": 0.7567,
      "step": 992
    },
    {
      "epoch": 1.6394230769230769,
      "grad_norm": 13.156254768371582,
      "learning_rate": 3.360576923076923e-05,
      "loss": 0.7138,
      "step": 1023
    },
    {
      "epoch": 1.689102564102564,
      "grad_norm": 39.97475051879883,
      "learning_rate": 3.310897435897436e-05,
      "loss": 0.676,
      "step": 1054
    },
    {
      "epoch": 1.7387820512820513,
      "grad_norm": 24.62538719177246,
      "learning_rate": 3.261217948717949e-05,
      "loss": 0.683,
      "step": 1085
    },
    {
      "epoch": 1.7884615384615383,
      "grad_norm": 18.064680099487305,
      "learning_rate": 3.211538461538462e-05,
      "loss": 0.5746,
      "step": 1116
    },
    {
      "epoch": 1.8381410256410255,
      "grad_norm": 10.56497573852539,
      "learning_rate": 3.1618589743589746e-05,
      "loss": 0.5881,
      "step": 1147
    },
    {
      "epoch": 1.8878205128205128,
      "grad_norm": 43.007911682128906,
      "learning_rate": 3.112179487179487e-05,
      "loss": 0.78,
      "step": 1178
    },
    {
      "epoch": 1.9375,
      "grad_norm": 25.748680114746094,
      "learning_rate": 3.0625000000000006e-05,
      "loss": 0.592,
      "step": 1209
    },
    {
      "epoch": 1.9871794871794872,
      "grad_norm": 20.511089324951172,
      "learning_rate": 3.012820512820513e-05,
      "loss": 0.6807,
      "step": 1240
    },
    {
      "epoch": 2.0368589743589745,
      "grad_norm": 26.275846481323242,
      "learning_rate": 2.963141025641026e-05,
      "loss": 0.6935,
      "step": 1271
    },
    {
      "epoch": 2.0865384615384617,
      "grad_norm": 69.18566131591797,
      "learning_rate": 2.913461538461539e-05,
      "loss": 0.511,
      "step": 1302
    },
    {
      "epoch": 2.136217948717949,
      "grad_norm": 4.620207786560059,
      "learning_rate": 2.8637820512820513e-05,
      "loss": 0.5338,
      "step": 1333
    },
    {
      "epoch": 2.185897435897436,
      "grad_norm": 44.329566955566406,
      "learning_rate": 2.8141025641025643e-05,
      "loss": 0.4471,
      "step": 1364
    },
    {
      "epoch": 2.235576923076923,
      "grad_norm": 24.95821762084961,
      "learning_rate": 2.7644230769230773e-05,
      "loss": 0.5295,
      "step": 1395
    },
    {
      "epoch": 2.28525641025641,
      "grad_norm": 32.0108757019043,
      "learning_rate": 2.7147435897435896e-05,
      "loss": 0.5075,
      "step": 1426
    },
    {
      "epoch": 2.3349358974358974,
      "grad_norm": 15.610992431640625,
      "learning_rate": 2.6650641025641026e-05,
      "loss": 0.4898,
      "step": 1457
    },
    {
      "epoch": 2.3846153846153846,
      "grad_norm": 6.399534225463867,
      "learning_rate": 2.6153846153846157e-05,
      "loss": 0.4273,
      "step": 1488
    },
    {
      "epoch": 2.434294871794872,
      "grad_norm": 22.35305404663086,
      "learning_rate": 2.565705128205128e-05,
      "loss": 0.4826,
      "step": 1519
    },
    {
      "epoch": 2.483974358974359,
      "grad_norm": 26.82170867919922,
      "learning_rate": 2.516025641025641e-05,
      "loss": 0.4472,
      "step": 1550
    },
    {
      "epoch": 2.5336538461538463,
      "grad_norm": 31.885419845581055,
      "learning_rate": 2.466346153846154e-05,
      "loss": 0.3986,
      "step": 1581
    },
    {
      "epoch": 2.5833333333333335,
      "grad_norm": 21.020639419555664,
      "learning_rate": 2.4166666666666667e-05,
      "loss": 0.4856,
      "step": 1612
    },
    {
      "epoch": 2.6330128205128203,
      "grad_norm": 10.991323471069336,
      "learning_rate": 2.3669871794871794e-05,
      "loss": 0.5008,
      "step": 1643
    },
    {
      "epoch": 2.6826923076923075,
      "grad_norm": 17.077383041381836,
      "learning_rate": 2.3173076923076924e-05,
      "loss": 0.3585,
      "step": 1674
    },
    {
      "epoch": 2.7323717948717947,
      "grad_norm": 6.274226188659668,
      "learning_rate": 2.2676282051282054e-05,
      "loss": 0.4101,
      "step": 1705
    },
    {
      "epoch": 2.782051282051282,
      "grad_norm": 7.574007511138916,
      "learning_rate": 2.217948717948718e-05,
      "loss": 0.3604,
      "step": 1736
    },
    {
      "epoch": 2.831730769230769,
      "grad_norm": 15.08122730255127,
      "learning_rate": 2.168269230769231e-05,
      "loss": 0.407,
      "step": 1767
    },
    {
      "epoch": 2.8814102564102564,
      "grad_norm": 32.56257247924805,
      "learning_rate": 2.1185897435897437e-05,
      "loss": 0.4002,
      "step": 1798
    },
    {
      "epoch": 2.9310897435897436,
      "grad_norm": 34.24089431762695,
      "learning_rate": 2.0689102564102564e-05,
      "loss": 0.429,
      "step": 1829
    },
    {
      "epoch": 2.980769230769231,
      "grad_norm": 5.287622928619385,
      "learning_rate": 2.0192307692307694e-05,
      "loss": 0.352,
      "step": 1860
    },
    {
      "epoch": 3.030448717948718,
      "grad_norm": 33.49539566040039,
      "learning_rate": 1.969551282051282e-05,
      "loss": 0.3348,
      "step": 1891
    },
    {
      "epoch": 3.0801282051282053,
      "grad_norm": 3.116619825363159,
      "learning_rate": 1.919871794871795e-05,
      "loss": 0.2613,
      "step": 1922
    },
    {
      "epoch": 3.1298076923076925,
      "grad_norm": 5.825009346008301,
      "learning_rate": 1.8701923076923078e-05,
      "loss": 0.279,
      "step": 1953
    },
    {
      "epoch": 3.1794871794871793,
      "grad_norm": 2.2447965145111084,
      "learning_rate": 1.8205128205128204e-05,
      "loss": 0.3405,
      "step": 1984
    },
    {
      "epoch": 3.2291666666666665,
      "grad_norm": 6.297438144683838,
      "learning_rate": 1.7708333333333335e-05,
      "loss": 0.3294,
      "step": 2015
    },
    {
      "epoch": 3.2788461538461537,
      "grad_norm": 17.42154884338379,
      "learning_rate": 1.721153846153846e-05,
      "loss": 0.296,
      "step": 2046
    },
    {
      "epoch": 3.328525641025641,
      "grad_norm": 11.834112167358398,
      "learning_rate": 1.6714743589743588e-05,
      "loss": 0.3767,
      "step": 2077
    },
    {
      "epoch": 3.378205128205128,
      "grad_norm": 2.2308290004730225,
      "learning_rate": 1.6217948717948718e-05,
      "loss": 0.2352,
      "step": 2108
    },
    {
      "epoch": 3.4278846153846154,
      "grad_norm": 13.97610855102539,
      "learning_rate": 1.5721153846153845e-05,
      "loss": 0.2937,
      "step": 2139
    },
    {
      "epoch": 3.4775641025641026,
      "grad_norm": 4.005247116088867,
      "learning_rate": 1.5224358974358973e-05,
      "loss": 0.283,
      "step": 2170
    },
    {
      "epoch": 3.52724358974359,
      "grad_norm": 5.180654525756836,
      "learning_rate": 1.4727564102564103e-05,
      "loss": 0.2773,
      "step": 2201
    },
    {
      "epoch": 3.5769230769230766,
      "grad_norm": 6.487146854400635,
      "learning_rate": 1.423076923076923e-05,
      "loss": 0.2665,
      "step": 2232
    },
    {
      "epoch": 3.626602564102564,
      "grad_norm": 2.202457904815674,
      "learning_rate": 1.373397435897436e-05,
      "loss": 0.2541,
      "step": 2263
    },
    {
      "epoch": 3.676282051282051,
      "grad_norm": 47.195499420166016,
      "learning_rate": 1.3237179487179489e-05,
      "loss": 0.2362,
      "step": 2294
    },
    {
      "epoch": 3.7259615384615383,
      "grad_norm": 21.26088523864746,
      "learning_rate": 1.2740384615384615e-05,
      "loss": 0.2331,
      "step": 2325
    },
    {
      "epoch": 3.7756410256410255,
      "grad_norm": 16.77073860168457,
      "learning_rate": 1.2243589743589744e-05,
      "loss": 0.2134,
      "step": 2356
    },
    {
      "epoch": 3.8253205128205128,
      "grad_norm": 2.246936559677124,
      "learning_rate": 1.1746794871794872e-05,
      "loss": 0.2437,
      "step": 2387
    },
    {
      "epoch": 3.875,
      "grad_norm": 1.4183766841888428,
      "learning_rate": 1.125e-05,
      "loss": 0.2012,
      "step": 2418
    },
    {
      "epoch": 3.9246794871794872,
      "grad_norm": 3.9523444175720215,
      "learning_rate": 1.0753205128205129e-05,
      "loss": 0.2204,
      "step": 2449
    },
    {
      "epoch": 3.9743589743589745,
      "grad_norm": 3.564764976501465,
      "learning_rate": 1.0256410256410256e-05,
      "loss": 0.2034,
      "step": 2480
    },
    {
      "epoch": 4.024038461538462,
      "grad_norm": 1.7849295139312744,
      "learning_rate": 9.759615384615384e-06,
      "loss": 0.1819,
      "step": 2511
    },
    {
      "epoch": 4.073717948717949,
      "grad_norm": 1.7893842458724976,
      "learning_rate": 9.262820512820514e-06,
      "loss": 0.1585,
      "step": 2542
    },
    {
      "epoch": 4.123397435897436,
      "grad_norm": 3.4071810245513916,
      "learning_rate": 8.766025641025641e-06,
      "loss": 0.1596,
      "step": 2573
    },
    {
      "epoch": 4.173076923076923,
      "grad_norm": 28.766258239746094,
      "learning_rate": 8.26923076923077e-06,
      "loss": 0.1563,
      "step": 2604
    },
    {
      "epoch": 4.222756410256411,
      "grad_norm": 3.066229820251465,
      "learning_rate": 7.772435897435898e-06,
      "loss": 0.1612,
      "step": 2635
    },
    {
      "epoch": 4.272435897435898,
      "grad_norm": 1.4539713859558105,
      "learning_rate": 7.275641025641026e-06,
      "loss": 0.151,
      "step": 2666
    },
    {
      "epoch": 4.322115384615385,
      "grad_norm": 3.9048240184783936,
      "learning_rate": 6.778846153846154e-06,
      "loss": 0.1514,
      "step": 2697
    },
    {
      "epoch": 4.371794871794872,
      "grad_norm": 1.2496715784072876,
      "learning_rate": 6.282051282051282e-06,
      "loss": 0.1632,
      "step": 2728
    },
    {
      "epoch": 4.421474358974359,
      "grad_norm": 1.516411542892456,
      "learning_rate": 5.785256410256411e-06,
      "loss": 0.1497,
      "step": 2759
    },
    {
      "epoch": 4.471153846153846,
      "grad_norm": 2.217137575149536,
      "learning_rate": 5.288461538461538e-06,
      "loss": 0.1514,
      "step": 2790
    },
    {
      "epoch": 4.520833333333333,
      "grad_norm": 1.7278540134429932,
      "learning_rate": 4.791666666666667e-06,
      "loss": 0.1409,
      "step": 2821
    },
    {
      "epoch": 4.57051282051282,
      "grad_norm": 2.3950905799865723,
      "learning_rate": 4.294871794871795e-06,
      "loss": 0.1524,
      "step": 2852
    },
    {
      "epoch": 4.6201923076923075,
      "grad_norm": 2.2806148529052734,
      "learning_rate": 3.798076923076923e-06,
      "loss": 0.1498,
      "step": 2883
    },
    {
      "epoch": 4.669871794871795,
      "grad_norm": 2.3029630184173584,
      "learning_rate": 3.3012820512820517e-06,
      "loss": 0.1471,
      "step": 2914
    },
    {
      "epoch": 4.719551282051282,
      "grad_norm": 1.592606782913208,
      "learning_rate": 2.8044871794871797e-06,
      "loss": 0.14,
      "step": 2945
    },
    {
      "epoch": 4.769230769230769,
      "grad_norm": 3.534292221069336,
      "learning_rate": 2.307692307692308e-06,
      "loss": 0.1426,
      "step": 2976
    },
    {
      "epoch": 4.818910256410256,
      "grad_norm": 2.344146966934204,
      "learning_rate": 1.810897435897436e-06,
      "loss": 0.1399,
      "step": 3007
    },
    {
      "epoch": 4.868589743589744,
      "grad_norm": 11.425804138183594,
      "learning_rate": 1.3141025641025643e-06,
      "loss": 0.1374,
      "step": 3038
    },
    {
      "epoch": 4.918269230769231,
      "grad_norm": 2.704406499862671,
      "learning_rate": 8.173076923076924e-07,
      "loss": 0.1389,
      "step": 3069
    },
    {
      "epoch": 4.967948717948718,
      "grad_norm": 6.8166728019714355,
      "learning_rate": 3.205128205128205e-07,
      "loss": 0.1721,
      "step": 3100
    },
    {
      "epoch": 5.0,
      "step": 3120,
      "total_flos": 1.845867535870722e+19,
      "train_loss": 0.6135548925552613,
      "train_runtime": 6226.8155,
      "train_samples_per_second": 2.003,
      "train_steps_per_second": 0.501
    }
  ],
  "logging_steps": 31,
  "max_steps": 3120,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.845867535870722e+19,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}