|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 10.0, |
|
"eval_steps": 500, |
|
"global_step": 2080, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.004807692307692308, |
|
"grad_norm": 416.0, |
|
"learning_rate": 9.615384615384617e-07, |
|
"loss": 51.8221, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.02403846153846154, |
|
"grad_norm": 386.0, |
|
"learning_rate": 4.807692307692308e-06, |
|
"loss": 49.6687, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.04807692307692308, |
|
"grad_norm": 202.0, |
|
"learning_rate": 9.615384615384616e-06, |
|
"loss": 45.9328, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.07211538461538461, |
|
"grad_norm": 126.5, |
|
"learning_rate": 1.4423076923076923e-05, |
|
"loss": 37.1137, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.09615384615384616, |
|
"grad_norm": 49.75, |
|
"learning_rate": 1.923076923076923e-05, |
|
"loss": 29.2088, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.1201923076923077, |
|
"grad_norm": 20.125, |
|
"learning_rate": 2.4038461538461542e-05, |
|
"loss": 25.4723, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.14423076923076922, |
|
"grad_norm": 15.9375, |
|
"learning_rate": 2.8846153846153845e-05, |
|
"loss": 23.5413, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.16826923076923078, |
|
"grad_norm": 9.1875, |
|
"learning_rate": 3.365384615384616e-05, |
|
"loss": 21.2424, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.19230769230769232, |
|
"grad_norm": 4.8125, |
|
"learning_rate": 3.846153846153846e-05, |
|
"loss": 20.2839, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.21634615384615385, |
|
"grad_norm": 4.59375, |
|
"learning_rate": 4.326923076923077e-05, |
|
"loss": 19.3389, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.2403846153846154, |
|
"grad_norm": 6.40625, |
|
"learning_rate": 4.8076923076923084e-05, |
|
"loss": 18.9421, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.2644230769230769, |
|
"grad_norm": 8.4375, |
|
"learning_rate": 5.288461538461539e-05, |
|
"loss": 18.0351, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.28846153846153844, |
|
"grad_norm": 13.875, |
|
"learning_rate": 5.769230769230769e-05, |
|
"loss": 17.0595, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.3125, |
|
"grad_norm": 23.625, |
|
"learning_rate": 6.25e-05, |
|
"loss": 14.7782, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.33653846153846156, |
|
"grad_norm": 30.75, |
|
"learning_rate": 6.730769230769232e-05, |
|
"loss": 10.9389, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.3605769230769231, |
|
"grad_norm": 24.625, |
|
"learning_rate": 7.211538461538462e-05, |
|
"loss": 5.5696, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.38461538461538464, |
|
"grad_norm": 6.34375, |
|
"learning_rate": 7.692307692307693e-05, |
|
"loss": 2.548, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.40865384615384615, |
|
"grad_norm": 5.71875, |
|
"learning_rate": 8.173076923076923e-05, |
|
"loss": 2.0589, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.4326923076923077, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 8.653846153846155e-05, |
|
"loss": 1.8063, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.4567307692307692, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 9.134615384615385e-05, |
|
"loss": 1.6319, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.4807692307692308, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 9.615384615384617e-05, |
|
"loss": 1.502, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.5048076923076923, |
|
"grad_norm": 0.65234375, |
|
"learning_rate": 0.00010096153846153846, |
|
"loss": 1.4262, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.5288461538461539, |
|
"grad_norm": 2.625, |
|
"learning_rate": 0.00010576923076923077, |
|
"loss": 1.3708, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.5528846153846154, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 0.00011057692307692309, |
|
"loss": 1.309, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.5769230769230769, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 0.00011538461538461538, |
|
"loss": 1.2806, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.6009615384615384, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 0.0001201923076923077, |
|
"loss": 1.2449, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.625, |
|
"grad_norm": 0.6484375, |
|
"learning_rate": 0.000125, |
|
"loss": 1.2165, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.6490384615384616, |
|
"grad_norm": 0.828125, |
|
"learning_rate": 0.0001298076923076923, |
|
"loss": 1.1992, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.6730769230769231, |
|
"grad_norm": 2.328125, |
|
"learning_rate": 0.00013461538461538464, |
|
"loss": 1.1854, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.6971153846153846, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 0.0001394230769230769, |
|
"loss": 1.1787, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.7211538461538461, |
|
"grad_norm": 2.6875, |
|
"learning_rate": 0.00014423076923076924, |
|
"loss": 1.1479, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.7451923076923077, |
|
"grad_norm": 1.8828125, |
|
"learning_rate": 0.00014903846153846155, |
|
"loss": 1.1501, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.7692307692307693, |
|
"grad_norm": 2.25, |
|
"learning_rate": 0.00015384615384615385, |
|
"loss": 1.121, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.7932692307692307, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 0.00015865384615384616, |
|
"loss": 1.1039, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.8173076923076923, |
|
"grad_norm": 0.76953125, |
|
"learning_rate": 0.00016346153846153846, |
|
"loss": 1.1093, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.8413461538461539, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 0.0001682692307692308, |
|
"loss": 1.1086, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.8653846153846154, |
|
"grad_norm": 3.765625, |
|
"learning_rate": 0.0001730769230769231, |
|
"loss": 1.1016, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.8894230769230769, |
|
"grad_norm": 5.8125, |
|
"learning_rate": 0.00017788461538461537, |
|
"loss": 1.0919, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.9134615384615384, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 0.0001826923076923077, |
|
"loss": 1.0958, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.9375, |
|
"grad_norm": 3.765625, |
|
"learning_rate": 0.0001875, |
|
"loss": 1.0871, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.9615384615384616, |
|
"grad_norm": 3.328125, |
|
"learning_rate": 0.00019230769230769233, |
|
"loss": 1.072, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.9855769230769231, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 0.00019711538461538464, |
|
"loss": 1.067, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 2.5660059452056885, |
|
"eval_runtime": 0.2581, |
|
"eval_samples_per_second": 38.744, |
|
"eval_steps_per_second": 3.874, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 1.0096153846153846, |
|
"grad_norm": 2.90625, |
|
"learning_rate": 0.0001999994367286727, |
|
"loss": 1.0461, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.0336538461538463, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 0.00019999309999911409, |
|
"loss": 1.0479, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 1.0576923076923077, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 0.00019997972289848503, |
|
"loss": 1.0469, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.0817307692307692, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 0.00019995930636864802, |
|
"loss": 1.0353, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.1057692307692308, |
|
"grad_norm": 0.80078125, |
|
"learning_rate": 0.00019993185184710165, |
|
"loss": 1.0479, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.1298076923076923, |
|
"grad_norm": 13.0625, |
|
"learning_rate": 0.00019989736126687963, |
|
"loss": 1.0327, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 1.1538461538461537, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 0.00019985583705641418, |
|
"loss": 1.0344, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.1778846153846154, |
|
"grad_norm": 4.5625, |
|
"learning_rate": 0.00019980728213936567, |
|
"loss": 1.0178, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 1.2019230769230769, |
|
"grad_norm": 1.6640625, |
|
"learning_rate": 0.00019975169993441627, |
|
"loss": 1.0139, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.2259615384615385, |
|
"grad_norm": 2.6875, |
|
"learning_rate": 0.0001996890943550295, |
|
"loss": 0.9975, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 2.375, |
|
"learning_rate": 0.00019961946980917456, |
|
"loss": 0.9845, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.2740384615384617, |
|
"grad_norm": 5.25, |
|
"learning_rate": 0.00019954283119901615, |
|
"loss": 0.9965, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 1.2980769230769231, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 0.0001994591839205691, |
|
"loss": 0.9998, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.3221153846153846, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 0.00019936853386331858, |
|
"loss": 0.9872, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.3461538461538463, |
|
"grad_norm": 0.8671875, |
|
"learning_rate": 0.0001992708874098054, |
|
"loss": 0.9926, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.3701923076923077, |
|
"grad_norm": 2.625, |
|
"learning_rate": 0.00019916625143517663, |
|
"loss": 0.9819, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 1.3942307692307692, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 0.00019905463330670143, |
|
"loss": 0.9828, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.4182692307692308, |
|
"grad_norm": 4.9375, |
|
"learning_rate": 0.00019893604088325257, |
|
"loss": 0.9768, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 1.4423076923076923, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 0.0001988104825147528, |
|
"loss": 0.9772, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.4663461538461537, |
|
"grad_norm": 4.0, |
|
"learning_rate": 0.0001986779670415872, |
|
"loss": 0.9754, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 1.4903846153846154, |
|
"grad_norm": 3.25, |
|
"learning_rate": 0.0001985385037939806, |
|
"loss": 0.9712, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.5144230769230769, |
|
"grad_norm": 2.375, |
|
"learning_rate": 0.00019839210259134062, |
|
"loss": 0.9726, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 1.5384615384615383, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 0.00019823877374156647, |
|
"loss": 0.954, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.5625, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 0.00019807852804032305, |
|
"loss": 0.9547, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 1.5865384615384617, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 0.00019791137677028082, |
|
"loss": 0.9552, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.6105769230769231, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 0.0001977373317003215, |
|
"loss": 0.9472, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 1.6346153846153846, |
|
"grad_norm": 3.6875, |
|
"learning_rate": 0.00019755640508470942, |
|
"loss": 0.9488, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.6586538461538463, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 0.0001973686096622286, |
|
"loss": 0.9445, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 1.6826923076923077, |
|
"grad_norm": 5.40625, |
|
"learning_rate": 0.00019717395865528602, |
|
"loss": 0.9428, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.7067307692307692, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 0.0001969724657689805, |
|
"loss": 0.9394, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.7307692307692308, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 0.00019676414519013781, |
|
"loss": 0.9524, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.7548076923076923, |
|
"grad_norm": 1.7265625, |
|
"learning_rate": 0.00019654901158631182, |
|
"loss": 0.9384, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.7788461538461537, |
|
"grad_norm": 2.5625, |
|
"learning_rate": 0.00019632708010475165, |
|
"loss": 0.9354, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.8028846153846154, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 0.0001960983663713353, |
|
"loss": 0.9314, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.8269230769230769, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 0.00019586288648946947, |
|
"loss": 0.9466, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.8509615384615383, |
|
"grad_norm": 9.25, |
|
"learning_rate": 0.0001956206570389556, |
|
"loss": 0.947, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.875, |
|
"grad_norm": 1.890625, |
|
"learning_rate": 0.0001953716950748227, |
|
"loss": 0.9377, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.8990384615384617, |
|
"grad_norm": 0.734375, |
|
"learning_rate": 0.0001951160181261263, |
|
"loss": 0.9395, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.9230769230769231, |
|
"grad_norm": 1.8515625, |
|
"learning_rate": 0.00019485364419471454, |
|
"loss": 0.9244, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.9471153846153846, |
|
"grad_norm": 1.25, |
|
"learning_rate": 0.00019458459175396043, |
|
"loss": 0.9304, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.9711538461538463, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 0.0001943088797474612, |
|
"loss": 0.9328, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.9951923076923077, |
|
"grad_norm": 8.625, |
|
"learning_rate": 0.00019402652758770475, |
|
"loss": 0.9432, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_loss": 2.447457790374756, |
|
"eval_runtime": 0.2388, |
|
"eval_samples_per_second": 41.883, |
|
"eval_steps_per_second": 4.188, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 2.019230769230769, |
|
"grad_norm": 7.15625, |
|
"learning_rate": 0.00019373755515470254, |
|
"loss": 0.926, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 2.043269230769231, |
|
"grad_norm": 2.5, |
|
"learning_rate": 0.00019344198279459, |
|
"loss": 0.9221, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 2.0673076923076925, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 0.00019313983131819407, |
|
"loss": 0.9142, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 2.0913461538461537, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 0.00019283112199956775, |
|
"loss": 0.9251, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 2.1153846153846154, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 0.00019251587657449236, |
|
"loss": 0.9006, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 2.139423076923077, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 0.00019219411723894717, |
|
"loss": 0.9073, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 2.1634615384615383, |
|
"grad_norm": 4.9375, |
|
"learning_rate": 0.0001918658666475465, |
|
"loss": 0.9052, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 2.1875, |
|
"grad_norm": 4.125, |
|
"learning_rate": 0.00019153114791194473, |
|
"loss": 0.9128, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 2.2115384615384617, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 0.00019118998459920902, |
|
"loss": 0.914, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 2.235576923076923, |
|
"grad_norm": 7.84375, |
|
"learning_rate": 0.00019084240073015998, |
|
"loss": 0.9143, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 2.2596153846153846, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 0.0001904884207776804, |
|
"loss": 0.9272, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 2.2836538461538463, |
|
"grad_norm": 0.6796875, |
|
"learning_rate": 0.00019012806966499217, |
|
"loss": 0.9118, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 2.3076923076923075, |
|
"grad_norm": 2.8125, |
|
"learning_rate": 0.0001897613727639014, |
|
"loss": 0.935, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 2.331730769230769, |
|
"grad_norm": 2.71875, |
|
"learning_rate": 0.00018938835589301216, |
|
"loss": 0.9075, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 2.355769230769231, |
|
"grad_norm": 3.203125, |
|
"learning_rate": 0.00018900904531590846, |
|
"loss": 0.8945, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 2.3798076923076925, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 0.00018862346773930523, |
|
"loss": 0.8932, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 2.4038461538461537, |
|
"grad_norm": 2.609375, |
|
"learning_rate": 0.0001882316503111678, |
|
"loss": 0.8981, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.4278846153846154, |
|
"grad_norm": 1.921875, |
|
"learning_rate": 0.00018783362061880062, |
|
"loss": 0.895, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 2.451923076923077, |
|
"grad_norm": 7.25, |
|
"learning_rate": 0.00018742940668690464, |
|
"loss": 0.8995, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.4759615384615383, |
|
"grad_norm": 7.90625, |
|
"learning_rate": 0.00018701903697560437, |
|
"loss": 0.8864, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 4.75, |
|
"learning_rate": 0.00018660254037844388, |
|
"loss": 0.8893, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 2.5240384615384617, |
|
"grad_norm": 13.375, |
|
"learning_rate": 0.00018617994622035253, |
|
"loss": 0.9098, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 2.5480769230769234, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 0.00018575128425558023, |
|
"loss": 0.9133, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 2.5721153846153846, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 0.00018531658466560246, |
|
"loss": 0.9042, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 2.5961538461538463, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 0.00018487587805699526, |
|
"loss": 0.8994, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 2.6201923076923075, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 0.00018442919545928022, |
|
"loss": 0.8857, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 2.644230769230769, |
|
"grad_norm": 1.703125, |
|
"learning_rate": 0.0001839765683227398, |
|
"loss": 0.8794, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 2.668269230769231, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 0.00018351802851620294, |
|
"loss": 0.8803, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 2.6923076923076925, |
|
"grad_norm": 2.625, |
|
"learning_rate": 0.00018305360832480117, |
|
"loss": 0.8835, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 2.7163461538461537, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 0.00018258334044769558, |
|
"loss": 0.881, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 2.7403846153846154, |
|
"grad_norm": 10.125, |
|
"learning_rate": 0.00018210725799577439, |
|
"loss": 0.8897, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 2.7644230769230766, |
|
"grad_norm": 8.0625, |
|
"learning_rate": 0.00018162539448932164, |
|
"loss": 0.8743, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 2.7884615384615383, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 0.00018113778385565733, |
|
"loss": 0.8811, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 2.8125, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 0.00018064446042674828, |
|
"loss": 0.887, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 2.8365384615384617, |
|
"grad_norm": 2.578125, |
|
"learning_rate": 0.00018014545893679115, |
|
"loss": 0.8917, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 2.8605769230769234, |
|
"grad_norm": 1.6640625, |
|
"learning_rate": 0.00017964081451976672, |
|
"loss": 0.8733, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 2.8846153846153846, |
|
"grad_norm": 2.890625, |
|
"learning_rate": 0.0001791305627069662, |
|
"loss": 0.9015, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 2.9086538461538463, |
|
"grad_norm": 2.75, |
|
"learning_rate": 0.00017861473942448953, |
|
"loss": 0.8923, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 2.9326923076923075, |
|
"grad_norm": 17.125, |
|
"learning_rate": 0.00017809338099071577, |
|
"loss": 0.8995, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 2.956730769230769, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 0.00017756652411374618, |
|
"loss": 0.9079, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 2.980769230769231, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 0.00017703420588881946, |
|
"loss": 0.9222, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_loss": 2.442279815673828, |
|
"eval_runtime": 0.238, |
|
"eval_samples_per_second": 42.025, |
|
"eval_steps_per_second": 4.202, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 3.0048076923076925, |
|
"grad_norm": 7.46875, |
|
"learning_rate": 0.0001764964637957, |
|
"loss": 0.896, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 3.0288461538461537, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 0.0001759533356960391, |
|
"loss": 0.888, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 3.0528846153846154, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 0.00017540485983070887, |
|
"loss": 0.8629, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 3.076923076923077, |
|
"grad_norm": 2.75, |
|
"learning_rate": 0.00017485107481711012, |
|
"loss": 0.8652, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 3.1009615384615383, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 0.00017429201964645313, |
|
"loss": 0.8815, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 3.125, |
|
"grad_norm": 15.375, |
|
"learning_rate": 0.0001737277336810124, |
|
"loss": 0.8888, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 3.1490384615384617, |
|
"grad_norm": 2.78125, |
|
"learning_rate": 0.00017315825665135522, |
|
"loss": 0.8722, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 3.173076923076923, |
|
"grad_norm": 1.0, |
|
"learning_rate": 0.00017258362865354426, |
|
"loss": 0.8506, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 3.1971153846153846, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 0.00017200389014631446, |
|
"loss": 0.8684, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 3.2211538461538463, |
|
"grad_norm": 5.84375, |
|
"learning_rate": 0.00017141908194822446, |
|
"loss": 0.8432, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 3.2451923076923075, |
|
"grad_norm": 0.8046875, |
|
"learning_rate": 0.00017082924523478262, |
|
"loss": 0.8466, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 3.269230769230769, |
|
"grad_norm": 9.75, |
|
"learning_rate": 0.00017023442153554777, |
|
"loss": 0.8492, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 3.293269230769231, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 0.0001696346527312053, |
|
"loss": 0.8435, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 3.3173076923076925, |
|
"grad_norm": 5.28125, |
|
"learning_rate": 0.00016902998105061844, |
|
"loss": 0.8578, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 3.3413461538461537, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 0.00016842044906785483, |
|
"loss": 0.8517, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 3.3653846153846154, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 0.0001678060996991891, |
|
"loss": 0.87, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 3.389423076923077, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 0.0001671869762000811, |
|
"loss": 0.8582, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 3.4134615384615383, |
|
"grad_norm": 19.625, |
|
"learning_rate": 0.00016656312216213034, |
|
"loss": 0.8727, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 3.4375, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 0.00016593458151000688, |
|
"loss": 0.878, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 3.4615384615384617, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 0.0001653013984983585, |
|
"loss": 0.851, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 3.485576923076923, |
|
"grad_norm": 4.0625, |
|
"learning_rate": 0.00016466361770869494, |
|
"loss": 0.8602, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 3.5096153846153846, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 0.00016402128404624882, |
|
"loss": 0.8497, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 3.5336538461538463, |
|
"grad_norm": 15.0, |
|
"learning_rate": 0.00016337444273681407, |
|
"loss": 0.8458, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 3.5576923076923075, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 0.00016272313932356162, |
|
"loss": 0.8353, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 3.581730769230769, |
|
"grad_norm": 2.125, |
|
"learning_rate": 0.0001620674196638327, |
|
"loss": 0.8244, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 3.605769230769231, |
|
"grad_norm": 11.625, |
|
"learning_rate": 0.0001614073299259101, |
|
"loss": 0.8282, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 3.6298076923076925, |
|
"grad_norm": 12.4375, |
|
"learning_rate": 0.00016074291658576772, |
|
"loss": 0.8396, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 3.6538461538461537, |
|
"grad_norm": 17.25, |
|
"learning_rate": 0.0001600742264237979, |
|
"loss": 0.8315, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 3.6778846153846154, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 0.00015940130652151803, |
|
"loss": 0.8336, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 3.7019230769230766, |
|
"grad_norm": 14.4375, |
|
"learning_rate": 0.0001587242042582554, |
|
"loss": 0.8239, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 3.7259615384615383, |
|
"grad_norm": 0.703125, |
|
"learning_rate": 0.00015804296730781135, |
|
"loss": 0.8185, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"grad_norm": 1.9296875, |
|
"learning_rate": 0.0001573576436351046, |
|
"loss": 0.8416, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 3.7740384615384617, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 0.0001566682814927943, |
|
"loss": 0.8347, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 3.7980769230769234, |
|
"grad_norm": 2.5625, |
|
"learning_rate": 0.00015597492941788222, |
|
"loss": 0.8497, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 3.8221153846153846, |
|
"grad_norm": 10.1875, |
|
"learning_rate": 0.00015527763622829577, |
|
"loss": 0.8309, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 3.8461538461538463, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 0.00015457645101945046, |
|
"loss": 0.8141, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 3.8701923076923075, |
|
"grad_norm": 1.25, |
|
"learning_rate": 0.00015387142316079347, |
|
"loss": 0.8212, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 3.894230769230769, |
|
"grad_norm": 1.6484375, |
|
"learning_rate": 0.00015316260229232727, |
|
"loss": 0.8197, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 3.918269230769231, |
|
"grad_norm": 4.25, |
|
"learning_rate": 0.00015245003832111483, |
|
"loss": 0.8395, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 3.9423076923076925, |
|
"grad_norm": 4.09375, |
|
"learning_rate": 0.00015173378141776568, |
|
"loss": 0.8238, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 3.9663461538461537, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 0.0001510138820129033, |
|
"loss": 0.8065, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 3.9903846153846154, |
|
"grad_norm": 7.375, |
|
"learning_rate": 0.00015029039079361448, |
|
"loss": 0.8069, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_loss": 2.4326889514923096, |
|
"eval_runtime": 0.2383, |
|
"eval_samples_per_second": 41.965, |
|
"eval_steps_per_second": 4.196, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 4.014423076923077, |
|
"grad_norm": 12.3125, |
|
"learning_rate": 0.0001495633586998807, |
|
"loss": 0.8046, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 4.038461538461538, |
|
"grad_norm": 10.3125, |
|
"learning_rate": 0.00014883283692099112, |
|
"loss": 0.8045, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 4.0625, |
|
"grad_norm": 0.6640625, |
|
"learning_rate": 0.00014809887689193877, |
|
"loss": 0.8012, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 4.086538461538462, |
|
"grad_norm": 0.79296875, |
|
"learning_rate": 0.00014736153028979893, |
|
"loss": 0.7927, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 4.110576923076923, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 0.00014662084903009065, |
|
"loss": 0.8012, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 4.134615384615385, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 0.00014587688526312143, |
|
"loss": 0.7909, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 4.158653846153846, |
|
"grad_norm": 0.69921875, |
|
"learning_rate": 0.00014512969137031538, |
|
"loss": 0.7764, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 4.1826923076923075, |
|
"grad_norm": 2.828125, |
|
"learning_rate": 0.00014437931996052518, |
|
"loss": 0.7747, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 4.206730769230769, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 0.00014362582386632798, |
|
"loss": 0.7841, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 4.230769230769231, |
|
"grad_norm": 0.72265625, |
|
"learning_rate": 0.00014286925614030542, |
|
"loss": 0.7827, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 4.2548076923076925, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 0.00014210967005130837, |
|
"loss": 0.7824, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 4.278846153846154, |
|
"grad_norm": 0.7578125, |
|
"learning_rate": 0.00014134711908070631, |
|
"loss": 0.7813, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 4.302884615384615, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 0.00014058165691862174, |
|
"loss": 0.7777, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 4.326923076923077, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.0001398133374601501, |
|
"loss": 0.7812, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 4.350961538461538, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 0.0001390422148015649, |
|
"loss": 0.7779, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 4.375, |
|
"grad_norm": 4.90625, |
|
"learning_rate": 0.000138268343236509, |
|
"loss": 0.7778, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 4.399038461538462, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 0.00013749177725217175, |
|
"loss": 0.7731, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 4.423076923076923, |
|
"grad_norm": 0.63671875, |
|
"learning_rate": 0.00013671257152545277, |
|
"loss": 0.7759, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 4.447115384615385, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 0.00013593078091911218, |
|
"loss": 0.7759, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 4.471153846153846, |
|
"grad_norm": 0.6640625, |
|
"learning_rate": 0.00013514646047790775, |
|
"loss": 0.7716, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 4.4951923076923075, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 0.00013435966542471928, |
|
"loss": 0.7764, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 4.519230769230769, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 0.0001335704511566605, |
|
"loss": 0.7836, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 4.543269230769231, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 0.00013277887324117857, |
|
"loss": 0.7786, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 4.5673076923076925, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.00013198498741214166, |
|
"loss": 0.7694, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 4.591346153846154, |
|
"grad_norm": 0.48828125, |
|
"learning_rate": 0.0001311888495659149, |
|
"loss": 0.773, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 4.615384615384615, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.0001303905157574247, |
|
"loss": 0.77, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 4.639423076923077, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 0.00012959004219621195, |
|
"loss": 0.7649, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 4.663461538461538, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 0.00012878748524247462, |
|
"loss": 0.7749, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 4.6875, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.00012798290140309923, |
|
"loss": 0.7666, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 4.711538461538462, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 0.00012717634732768243, |
|
"loss": 0.7723, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 4.735576923076923, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 0.0001263678798045425, |
|
"loss": 0.7713, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 4.759615384615385, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.0001255575557567207, |
|
"loss": 0.7692, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 4.783653846153846, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 0.00012474543223797352, |
|
"loss": 0.7608, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 4.8076923076923075, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.0001239315664287558, |
|
"loss": 0.7612, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 4.831730769230769, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 0.00012311601563219434, |
|
"loss": 0.7632, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 4.855769230769231, |
|
"grad_norm": 0.7109375, |
|
"learning_rate": 0.00012229883727005365, |
|
"loss": 0.7703, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 4.8798076923076925, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 0.00012148008887869269, |
|
"loss": 0.7666, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 4.903846153846154, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 0.00012065982810501404, |
|
"loss": 0.7706, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 4.927884615384615, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.00011983811270240484, |
|
"loss": 0.7655, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 4.951923076923077, |
|
"grad_norm": 0.59375, |
|
"learning_rate": 0.00011901500052667068, |
|
"loss": 0.7597, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 4.975961538461538, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 0.00011819054953196186, |
|
"loss": 0.7652, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 0.00011736481776669306, |
|
"loss": 0.7635, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_loss": 2.423292875289917, |
|
"eval_runtime": 0.2377, |
|
"eval_samples_per_second": 42.07, |
|
"eval_steps_per_second": 4.207, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 5.024038461538462, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 0.00011653786336945614, |
|
"loss": 0.7427, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 5.048076923076923, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.00011570974456492678, |
|
"loss": 0.7423, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 5.072115384615385, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.00011488051965976486, |
|
"loss": 0.7404, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 5.096153846153846, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 0.00011405024703850929, |
|
"loss": 0.7424, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 5.1201923076923075, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.00011321898515946708, |
|
"loss": 0.7388, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 5.144230769230769, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 0.00011238679255059752, |
|
"loss": 0.7479, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 5.168269230769231, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.00011155372780539124, |
|
"loss": 0.7477, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 5.1923076923076925, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 0.00011071984957874479, |
|
"loss": 0.7426, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 5.216346153846154, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.00010988521658283071, |
|
"loss": 0.744, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 5.240384615384615, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.0001090498875829638, |
|
"loss": 0.74, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 5.264423076923077, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.00010821392139346351, |
|
"loss": 0.7365, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 5.288461538461538, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 0.00010737737687351284, |
|
"loss": 0.7429, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 5.3125, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.00010654031292301432, |
|
"loss": 0.7501, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 5.336538461538462, |
|
"grad_norm": 0.7734375, |
|
"learning_rate": 0.00010570278847844275, |
|
"loss": 0.7378, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 5.360576923076923, |
|
"grad_norm": 0.76171875, |
|
"learning_rate": 0.0001048648625086957, |
|
"loss": 0.7476, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 5.384615384615385, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 0.00010402659401094152, |
|
"loss": 0.7388, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 5.408653846153846, |
|
"grad_norm": 0.64453125, |
|
"learning_rate": 0.00010318804200646553, |
|
"loss": 0.747, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 5.4326923076923075, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 0.00010234926553651422, |
|
"loss": 0.7353, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 5.456730769230769, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.00010151032365813859, |
|
"loss": 0.7411, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 5.480769230769231, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 0.00010067127544003563, |
|
"loss": 0.7377, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 5.5048076923076925, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 9.983217995838956e-05, |
|
"loss": 0.7351, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 5.528846153846154, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 9.899309629271246e-05, |
|
"loss": 0.7383, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 5.552884615384615, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 9.815408352168442e-05, |
|
"loss": 0.7406, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 5.576923076923077, |
|
"grad_norm": 0.62890625, |
|
"learning_rate": 9.73152007189939e-05, |
|
"loss": 0.7419, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 5.600961538461538, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 9.647650694917856e-05, |
|
"loss": 0.7503, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 5.625, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 9.563806126346642e-05, |
|
"loss": 0.7448, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 5.649038461538462, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 9.479992269561833e-05, |
|
"loss": 0.7376, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 5.673076923076923, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 9.396215025777139e-05, |
|
"loss": 0.7505, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 5.697115384615385, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 9.312480293628404e-05, |
|
"loss": 0.7411, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 5.721153846153846, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 9.22879396875828e-05, |
|
"loss": 0.7402, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 5.7451923076923075, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 9.145161943401142e-05, |
|
"loss": 0.7341, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 5.769230769230769, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 9.061590105968208e-05, |
|
"loss": 0.7441, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 5.793269230769231, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 8.97808434063295e-05, |
|
"loss": 0.7372, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 5.8173076923076925, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 8.894650526916803e-05, |
|
"loss": 0.7381, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 5.841346153846154, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 8.811294539275185e-05, |
|
"loss": 0.7381, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 5.865384615384615, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 8.728022246683894e-05, |
|
"loss": 0.7374, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 5.889423076923077, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 8.644839512225886e-05, |
|
"loss": 0.7356, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 5.913461538461538, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 8.561752192678443e-05, |
|
"loss": 0.7434, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 5.9375, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 8.478766138100834e-05, |
|
"loss": 0.7391, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 5.961538461538462, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 8.395887191422397e-05, |
|
"loss": 0.7421, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 5.985576923076923, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 8.313121188031164e-05, |
|
"loss": 0.7364, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_loss": 2.4450995922088623, |
|
"eval_runtime": 0.238, |
|
"eval_samples_per_second": 42.022, |
|
"eval_steps_per_second": 4.202, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 6.009615384615385, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 8.23047395536298e-05, |
|
"loss": 0.7278, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 6.033653846153846, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 8.147951312491227e-05, |
|
"loss": 0.7181, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 6.0576923076923075, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 8.065559069717088e-05, |
|
"loss": 0.7184, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 6.081730769230769, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 7.983303028160464e-05, |
|
"loss": 0.7117, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 6.105769230769231, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 7.901188979351526e-05, |
|
"loss": 0.7159, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 6.1298076923076925, |
|
"grad_norm": 0.6875, |
|
"learning_rate": 7.819222704822937e-05, |
|
"loss": 0.7092, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 6.153846153846154, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 7.73740997570278e-05, |
|
"loss": 0.7244, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 6.177884615384615, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 7.655756552308238e-05, |
|
"loss": 0.72, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 6.201923076923077, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 7.574268183739989e-05, |
|
"loss": 0.7184, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 6.225961538461538, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 7.492950607477451e-05, |
|
"loss": 0.716, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 7.411809548974792e-05, |
|
"loss": 0.7183, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 6.274038461538462, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 7.330850721257825e-05, |
|
"loss": 0.7159, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 6.298076923076923, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 7.250079824521743e-05, |
|
"loss": 0.7263, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 6.322115384615385, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 7.169502545729797e-05, |
|
"loss": 0.7194, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 6.346153846153846, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 7.089124558212871e-05, |
|
"loss": 0.7148, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 6.3701923076923075, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 7.008951521270037e-05, |
|
"loss": 0.7188, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 6.394230769230769, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 6.928989079770094e-05, |
|
"loss": 0.7241, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 6.418269230769231, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 6.849242863754108e-05, |
|
"loss": 0.7144, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 6.4423076923076925, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 6.769718488039023e-05, |
|
"loss": 0.7228, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 6.466346153846154, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 6.690421551822333e-05, |
|
"loss": 0.7197, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 6.490384615384615, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 6.611357638287823e-05, |
|
"loss": 0.7258, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 6.514423076923077, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 6.532532314212508e-05, |
|
"loss": 0.7134, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 6.538461538461538, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 6.453951129574644e-05, |
|
"loss": 0.7222, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 6.5625, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 6.375619617162985e-05, |
|
"loss": 0.7274, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 6.586538461538462, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 6.297543292187215e-05, |
|
"loss": 0.7236, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 6.610576923076923, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 6.219727651889646e-05, |
|
"loss": 0.7235, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 6.634615384615385, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 6.142178175158149e-05, |
|
"loss": 0.7162, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 6.658653846153846, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 6.064900322140392e-05, |
|
"loss": 0.7285, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 6.6826923076923075, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 5.9878995338594224e-05, |
|
"loss": 0.7136, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 6.706730769230769, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 5.9111812318305425e-05, |
|
"loss": 0.7169, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 6.730769230769231, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 5.834750817679606e-05, |
|
"loss": 0.7146, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 6.7548076923076925, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 5.75861367276269e-05, |
|
"loss": 0.7164, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 6.778846153846154, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 5.682775157787213e-05, |
|
"loss": 0.718, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 6.802884615384615, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 5.6072406124344855e-05, |
|
"loss": 0.7112, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 6.826923076923077, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 5.5320153549837415e-05, |
|
"loss": 0.7109, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 6.850961538461538, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 5.457104681937706e-05, |
|
"loss": 0.7192, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 6.875, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 5.382513867649663e-05, |
|
"loss": 0.7143, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 6.899038461538462, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 5.308248163952096e-05, |
|
"loss": 0.723, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 6.923076923076923, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 5.234312799786921e-05, |
|
"loss": 0.7089, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 6.947115384615385, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 5.1607129808373254e-05, |
|
"loss": 0.7096, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 6.971153846153846, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 5.087453889161229e-05, |
|
"loss": 0.7133, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 6.9951923076923075, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 5.014540682826434e-05, |
|
"loss": 0.7168, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_loss": 2.450974464416504, |
|
"eval_runtime": 0.2387, |
|
"eval_samples_per_second": 41.885, |
|
"eval_steps_per_second": 4.189, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 7.019230769230769, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 4.9419784955474524e-05, |
|
"loss": 0.7018, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 7.043269230769231, |
|
"grad_norm": 0.375, |
|
"learning_rate": 4.869772436324045e-05, |
|
"loss": 0.7019, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 7.0673076923076925, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 4.797927589081509e-05, |
|
"loss": 0.7056, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 7.091346153846154, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 4.726449012312726e-05, |
|
"loss": 0.7081, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 7.115384615384615, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 4.6553417387219886e-05, |
|
"loss": 0.709, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 7.139423076923077, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 4.58461077487067e-05, |
|
"loss": 0.7028, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 7.163461538461538, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 4.514261100824709e-05, |
|
"loss": 0.7022, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 7.1875, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 4.444297669803981e-05, |
|
"loss": 0.7063, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 7.211538461538462, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 4.374725407833532e-05, |
|
"loss": 0.7101, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 7.235576923076923, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 4.305549213396772e-05, |
|
"loss": 0.7046, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 7.259615384615385, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 4.236773957090548e-05, |
|
"loss": 0.7018, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 7.283653846153846, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 4.168404481282233e-05, |
|
"loss": 0.7038, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 7.3076923076923075, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 4.100445599768774e-05, |
|
"loss": 0.7072, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 7.331730769230769, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 4.0329020974377596e-05, |
|
"loss": 0.7026, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 7.355769230769231, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 3.96577872993053e-05, |
|
"loss": 0.7117, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 7.3798076923076925, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 3.899080223307335e-05, |
|
"loss": 0.7008, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 7.403846153846154, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 3.832811273714569e-05, |
|
"loss": 0.699, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 7.427884615384615, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 3.7669765470541365e-05, |
|
"loss": 0.7069, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 7.451923076923077, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 3.701580678654925e-05, |
|
"loss": 0.7055, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 7.475961538461538, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 3.636628272946434e-05, |
|
"loss": 0.7088, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 3.5721239031346066e-05, |
|
"loss": 0.7042, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 7.524038461538462, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 3.508072110879804e-05, |
|
"loss": 0.703, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 7.548076923076923, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 3.4444774059770536e-05, |
|
"loss": 0.7064, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 7.572115384615385, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 3.381344266038518e-05, |
|
"loss": 0.7007, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 7.596153846153846, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 3.318677136178228e-05, |
|
"loss": 0.7018, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 7.6201923076923075, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 3.2564804286991135e-05, |
|
"loss": 0.7029, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 7.644230769230769, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 3.1947585227823394e-05, |
|
"loss": 0.7023, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 7.668269230769231, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 3.1335157641789756e-05, |
|
"loss": 0.7134, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 7.6923076923076925, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 3.072756464904006e-05, |
|
"loss": 0.7034, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 7.716346153846154, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 3.0124849029327405e-05, |
|
"loss": 0.7026, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 7.740384615384615, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 2.9527053218996037e-05, |
|
"loss": 0.7046, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 7.764423076923077, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 2.8934219307993394e-05, |
|
"loss": 0.7015, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 7.788461538461538, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 2.8346389036906828e-05, |
|
"loss": 0.7023, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 7.8125, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 2.776360379402445e-05, |
|
"loss": 0.704, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 7.836538461538462, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 2.7185904612421176e-05, |
|
"loss": 0.6993, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 7.860576923076923, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 2.6613332167069638e-05, |
|
"loss": 0.7017, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 7.884615384615385, |
|
"grad_norm": 0.375, |
|
"learning_rate": 2.6045926771976303e-05, |
|
"loss": 0.7071, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 7.908653846153846, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 2.5483728377342984e-05, |
|
"loss": 0.7057, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 7.9326923076923075, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 2.492677656675414e-05, |
|
"loss": 0.7088, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 7.956730769230769, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 2.4375110554389702e-05, |
|
"loss": 0.6981, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 7.980769230769231, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 2.382876918226409e-05, |
|
"loss": 0.7064, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_loss": 2.4729323387145996, |
|
"eval_runtime": 0.3036, |
|
"eval_samples_per_second": 32.941, |
|
"eval_steps_per_second": 3.294, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 8.004807692307692, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 2.328779091749145e-05, |
|
"loss": 0.7012, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 8.028846153846153, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 2.2752213849577188e-05, |
|
"loss": 0.6971, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 8.052884615384615, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 2.2222075687736187e-05, |
|
"loss": 0.7013, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 8.076923076923077, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 2.1697413758237784e-05, |
|
"loss": 0.6927, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 8.100961538461538, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 2.1178265001777554e-05, |
|
"loss": 0.69, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 8.125, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 2.0664665970876496e-05, |
|
"loss": 0.6943, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 8.149038461538462, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 2.0156652827307364e-05, |
|
"loss": 0.7043, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 8.173076923076923, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 1.965426133954854e-05, |
|
"loss": 0.7023, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 8.197115384615385, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 1.9157526880265765e-05, |
|
"loss": 0.7036, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 8.221153846153847, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 1.8666484423821373e-05, |
|
"loss": 0.6978, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 8.245192307692308, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 1.8181168543812066e-05, |
|
"loss": 0.6973, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 8.26923076923077, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 1.7701613410634365e-05, |
|
"loss": 0.6943, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 8.29326923076923, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 1.7227852789078913e-05, |
|
"loss": 0.6965, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 8.317307692307692, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 1.6759920035953093e-05, |
|
"loss": 0.6942, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 8.341346153846153, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 1.6297848097732338e-05, |
|
"loss": 0.7008, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 8.365384615384615, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 1.584166950824061e-05, |
|
"loss": 0.7029, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 8.389423076923077, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 1.5391416386359524e-05, |
|
"loss": 0.695, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 8.413461538461538, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 1.4947120433767047e-05, |
|
"loss": 0.7025, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 8.4375, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 1.4508812932705363e-05, |
|
"loss": 0.6916, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 8.461538461538462, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 1.4076524743778319e-05, |
|
"loss": 0.7005, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 8.485576923076923, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 1.3650286303778714e-05, |
|
"loss": 0.6944, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 8.509615384615385, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 1.3230127623545064e-05, |
|
"loss": 0.6951, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 8.533653846153847, |
|
"grad_norm": 0.375, |
|
"learning_rate": 1.2816078285848799e-05, |
|
"loss": 0.7039, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 8.557692307692308, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 1.2408167443311214e-05, |
|
"loss": 0.6976, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 8.58173076923077, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 1.2006423816350977e-05, |
|
"loss": 0.6949, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 8.60576923076923, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 1.1610875691161915e-05, |
|
"loss": 0.7008, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 8.629807692307692, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 1.1221550917721436e-05, |
|
"loss": 0.693, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 8.653846153846153, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 1.083847690782972e-05, |
|
"loss": 0.6887, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 8.677884615384615, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 1.046168063317956e-05, |
|
"loss": 0.7022, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 8.701923076923077, |
|
"grad_norm": 0.375, |
|
"learning_rate": 1.0091188623457415e-05, |
|
"loss": 0.7058, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 8.725961538461538, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 9.727026964475483e-06, |
|
"loss": 0.6948, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 8.75, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 9.369221296335006e-06, |
|
"loss": 0.7025, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 8.774038461538462, |
|
"grad_norm": 0.375, |
|
"learning_rate": 9.017796811621049e-06, |
|
"loss": 0.7007, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 8.798076923076923, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 8.672778253628621e-06, |
|
"loss": 0.6992, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 8.822115384615385, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 8.334189914620671e-06, |
|
"loss": 0.6934, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 8.846153846153847, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 8.002055634117578e-06, |
|
"loss": 0.6955, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 8.870192307692308, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 7.67639879721872e-06, |
|
"loss": 0.7001, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 8.89423076923077, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 7.357242332955916e-06, |
|
"loss": 0.6982, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 8.91826923076923, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 7.0446087126790575e-06, |
|
"loss": 0.702, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 8.942307692307692, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 6.738519948473976e-06, |
|
"loss": 0.7033, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 8.966346153846153, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 6.4389975916125056e-06, |
|
"loss": 0.6961, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 8.990384615384615, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 6.146062731035129e-06, |
|
"loss": 0.6934, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"eval_loss": 2.4781441688537598, |
|
"eval_runtime": 0.2396, |
|
"eval_samples_per_second": 41.728, |
|
"eval_steps_per_second": 4.173, |
|
"step": 1872 |
|
}, |
|
{ |
|
"epoch": 9.014423076923077, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 5.859735991866166e-06, |
|
"loss": 0.6903, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 9.038461538461538, |
|
"grad_norm": 0.375, |
|
"learning_rate": 5.580037533961546e-06, |
|
"loss": 0.6989, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 9.0625, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 5.306987050489442e-06, |
|
"loss": 0.6981, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 9.086538461538462, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 5.040603766543594e-06, |
|
"loss": 0.6943, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 9.110576923076923, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 4.780906437789845e-06, |
|
"loss": 0.6896, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 9.134615384615385, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 4.527913349145441e-06, |
|
"loss": 0.6998, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 9.158653846153847, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 4.281642313491685e-06, |
|
"loss": 0.6957, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 9.182692307692308, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 4.042110670419763e-06, |
|
"loss": 0.7036, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 9.20673076923077, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 3.809335285009863e-06, |
|
"loss": 0.7007, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 9.23076923076923, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 3.5833325466437694e-06, |
|
"loss": 0.6963, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 9.254807692307692, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 3.3641183678508327e-06, |
|
"loss": 0.6982, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 9.278846153846153, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 3.1517081831876737e-06, |
|
"loss": 0.689, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 9.302884615384615, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 2.9461169481514005e-06, |
|
"loss": 0.6964, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 9.326923076923077, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 2.7473591381266708e-06, |
|
"loss": 0.6911, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 9.350961538461538, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 2.55544874736644e-06, |
|
"loss": 0.6924, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 9.375, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 2.3703992880066638e-06, |
|
"loss": 0.6946, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 9.399038461538462, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 2.1922237891149667e-06, |
|
"loss": 0.7009, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 9.423076923076923, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 2.0209347957732328e-06, |
|
"loss": 0.693, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 9.447115384615385, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 1.8565443681943461e-06, |
|
"loss": 0.6947, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 9.471153846153847, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 1.6990640808730696e-06, |
|
"loss": 0.7001, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 9.495192307692308, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 1.5485050217710295e-06, |
|
"loss": 0.6956, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 9.51923076923077, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 1.404877791536141e-06, |
|
"loss": 0.6952, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 9.54326923076923, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 1.2681925027561382e-06, |
|
"loss": 0.6965, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 9.567307692307692, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 1.1384587792465872e-06, |
|
"loss": 0.6953, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 9.591346153846153, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 1.0156857553732857e-06, |
|
"loss": 0.694, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 9.615384615384615, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 8.998820754091531e-07, |
|
"loss": 0.692, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 9.639423076923077, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 7.910558929255851e-07, |
|
"loss": 0.6951, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 9.663461538461538, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 6.892148702183133e-07, |
|
"loss": 0.6955, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 9.6875, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 5.943661777680354e-07, |
|
"loss": 0.6957, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 9.711538461538462, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 5.065164937354428e-07, |
|
"loss": 0.698, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 9.735576923076923, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 4.256720034910511e-07, |
|
"loss": 0.6988, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 9.759615384615385, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 3.5183839917972697e-07, |
|
"loss": 0.6933, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 9.783653846153847, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 2.850208793198861e-07, |
|
"loss": 0.705, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 9.807692307692308, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 2.2522414843748618e-07, |
|
"loss": 0.6993, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 9.83173076923077, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 1.7245241673476965e-07, |
|
"loss": 0.6974, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 9.85576923076923, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 1.2670939979384512e-07, |
|
"loss": 0.7062, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 9.879807692307692, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 8.799831831506344e-08, |
|
"loss": 0.691, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 9.903846153846153, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 5.632189789027687e-08, |
|
"loss": 0.6951, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 9.927884615384615, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 3.168236881092579e-08, |
|
"loss": 0.698, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 9.951923076923077, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 1.4081465910975588e-08, |
|
"loss": 0.686, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 9.975961538461538, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 3.520428444825363e-09, |
|
"loss": 0.7005, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.0, |
|
"loss": 0.7018, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"eval_loss": 2.4814841747283936, |
|
"eval_runtime": 0.2382, |
|
"eval_samples_per_second": 41.987, |
|
"eval_steps_per_second": 4.199, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"step": 2080, |
|
"total_flos": 6.350169378683617e+18, |
|
"train_loss": 1.6486573993013456, |
|
"train_runtime": 5073.9705, |
|
"train_samples_per_second": 26.214, |
|
"train_steps_per_second": 0.41 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 2080, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 10, |
|
"save_steps": 100, |
|
"total_flos": 6.350169378683617e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |