{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 8.0,
  "eval_steps": 400,
  "global_step": 1712,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04672897196261682,
      "grad_norm": 0.8108399564287991,
      "learning_rate": 9.999158178436007e-05,
      "loss": 1.1115,
      "step": 10
    },
    {
      "epoch": 0.09345794392523364,
      "grad_norm": 0.19874272531557952,
      "learning_rate": 9.996632997209443e-05,
      "loss": 0.1905,
      "step": 20
    },
    {
      "epoch": 0.14018691588785046,
      "grad_norm": 0.12703843810365584,
      "learning_rate": 9.992425306621115e-05,
      "loss": 0.0789,
      "step": 30
    },
    {
      "epoch": 0.18691588785046728,
      "grad_norm": 0.10355469925054432,
      "learning_rate": 9.986536523520889e-05,
      "loss": 0.0924,
      "step": 40
    },
    {
      "epoch": 0.2336448598130841,
      "grad_norm": 0.1852892166670333,
      "learning_rate": 9.978968630830607e-05,
      "loss": 0.065,
      "step": 50
    },
    {
      "epoch": 0.2803738317757009,
      "grad_norm": 0.09323366928517218,
      "learning_rate": 9.969724176876373e-05,
      "loss": 0.0531,
      "step": 60
    },
    {
      "epoch": 0.32710280373831774,
      "grad_norm": 0.08829483418959966,
      "learning_rate": 9.95880627453046e-05,
      "loss": 0.0782,
      "step": 70
    },
    {
      "epoch": 0.37383177570093457,
      "grad_norm": 0.15247331956402047,
      "learning_rate": 9.94621860016312e-05,
      "loss": 0.0633,
      "step": 80
    },
    {
      "epoch": 0.4205607476635514,
      "grad_norm": 0.14725457723867078,
      "learning_rate": 9.931965392404641e-05,
      "loss": 0.0501,
      "step": 90
    },
    {
      "epoch": 0.4672897196261682,
      "grad_norm": 0.0922740352810748,
      "learning_rate": 9.916051450718084e-05,
      "loss": 0.0508,
      "step": 100
    },
    {
      "epoch": 0.514018691588785,
      "grad_norm": 0.0938141317411703,
      "learning_rate": 9.89848213378316e-05,
      "loss": 0.0356,
      "step": 110
    },
    {
      "epoch": 0.5607476635514018,
      "grad_norm": 0.12421407254599728,
      "learning_rate": 9.879263357691814e-05,
      "loss": 0.0416,
      "step": 120
    },
    {
      "epoch": 0.6074766355140186,
      "grad_norm": 0.237166690677984,
      "learning_rate": 9.858401593956104e-05,
      "loss": 0.0407,
      "step": 130
    },
    {
      "epoch": 0.6542056074766355,
      "grad_norm": 0.08880528008550467,
      "learning_rate": 9.83590386732906e-05,
      "loss": 0.0338,
      "step": 140
    },
    {
      "epoch": 0.7009345794392523,
      "grad_norm": 0.11986338307776492,
      "learning_rate": 9.811777753439248e-05,
      "loss": 0.0341,
      "step": 150
    },
    {
      "epoch": 0.7476635514018691,
      "grad_norm": 0.08032330195379932,
      "learning_rate": 9.786031376239842e-05,
      "loss": 0.024,
      "step": 160
    },
    {
      "epoch": 0.794392523364486,
      "grad_norm": 0.07663400363911589,
      "learning_rate": 9.758673405273046e-05,
      "loss": 0.0242,
      "step": 170
    },
    {
      "epoch": 0.8411214953271028,
      "grad_norm": 0.14946426405800525,
      "learning_rate": 9.729713052750826e-05,
      "loss": 0.0237,
      "step": 180
    },
    {
      "epoch": 0.8878504672897196,
      "grad_norm": 0.0759393683447156,
      "learning_rate": 9.699160070452882e-05,
      "loss": 0.0202,
      "step": 190
    },
    {
      "epoch": 0.9345794392523364,
      "grad_norm": 0.12858143092057014,
      "learning_rate": 9.667024746442952e-05,
      "loss": 0.019,
      "step": 200
    },
    {
      "epoch": 0.9813084112149533,
      "grad_norm": 0.10495732835434407,
      "learning_rate": 9.633317901604523e-05,
      "loss": 0.0183,
      "step": 210
    },
    {
      "epoch": 1.02803738317757,
      "grad_norm": 0.06980923471022675,
      "learning_rate": 9.59805088599713e-05,
      "loss": 0.015,
      "step": 220
    },
    {
      "epoch": 1.074766355140187,
      "grad_norm": 0.06932653950015637,
      "learning_rate": 9.561235575034469e-05,
      "loss": 0.0131,
      "step": 230
    },
    {
      "epoch": 1.1214953271028036,
      "grad_norm": 0.058548744277500064,
      "learning_rate": 9.522884365485598e-05,
      "loss": 0.0118,
      "step": 240
    },
    {
      "epoch": 1.1682242990654206,
      "grad_norm": 0.03726047410080607,
      "learning_rate": 9.483010171300602e-05,
      "loss": 0.0135,
      "step": 250
    },
    {
      "epoch": 1.2149532710280373,
      "grad_norm": 0.05545270603496205,
      "learning_rate": 9.441626419262084e-05,
      "loss": 0.0121,
      "step": 260
    },
    {
      "epoch": 1.2616822429906542,
      "grad_norm": 0.05893605931281947,
      "learning_rate": 9.398747044463992e-05,
      "loss": 0.012,
      "step": 270
    },
    {
      "epoch": 1.308411214953271,
      "grad_norm": 0.052627420151130956,
      "learning_rate": 9.354386485619264e-05,
      "loss": 0.0136,
      "step": 280
    },
    {
      "epoch": 1.355140186915888,
      "grad_norm": 0.051128701449751776,
      "learning_rate": 9.308559680197914e-05,
      "loss": 0.0137,
      "step": 290
    },
    {
      "epoch": 1.4018691588785046,
      "grad_norm": 0.09981668657519205,
      "learning_rate": 9.261282059397145e-05,
      "loss": 0.0116,
      "step": 300
    },
    {
      "epoch": 1.4485981308411215,
      "grad_norm": 0.04456617572462679,
      "learning_rate": 9.212569542945234e-05,
      "loss": 0.0128,
      "step": 310
    },
    {
      "epoch": 1.4953271028037383,
      "grad_norm": 0.06065053274675731,
      "learning_rate": 9.162438533740892e-05,
      "loss": 0.0113,
      "step": 320
    },
    {
      "epoch": 1.542056074766355,
      "grad_norm": 0.08108720647504085,
      "learning_rate": 9.110905912329949e-05,
      "loss": 0.012,
      "step": 330
    },
    {
      "epoch": 1.588785046728972,
      "grad_norm": 0.04189447531626791,
      "learning_rate": 9.057989031221188e-05,
      "loss": 0.0112,
      "step": 340
    },
    {
      "epoch": 1.6355140186915889,
      "grad_norm": 0.06292392241512725,
      "learning_rate": 9.003705709043253e-05,
      "loss": 0.0101,
      "step": 350
    },
    {
      "epoch": 1.6822429906542056,
      "grad_norm": 0.16839424797262825,
      "learning_rate": 8.948074224544614e-05,
      "loss": 0.0113,
      "step": 360
    },
    {
      "epoch": 1.7289719626168223,
      "grad_norm": 0.03631611864356132,
      "learning_rate": 8.891113310438587e-05,
      "loss": 0.0117,
      "step": 370
    },
    {
      "epoch": 1.7757009345794392,
      "grad_norm": 0.02998579815737736,
      "learning_rate": 8.832842147095495e-05,
      "loss": 0.0109,
      "step": 380
    },
    {
      "epoch": 1.8224299065420562,
      "grad_norm": 0.04479035362698208,
      "learning_rate": 8.773280356084077e-05,
      "loss": 0.0098,
      "step": 390
    },
    {
      "epoch": 1.8691588785046729,
      "grad_norm": 0.05286540264614212,
      "learning_rate": 8.712447993564361e-05,
      "loss": 0.0109,
      "step": 400
    },
    {
      "epoch": 1.8691588785046729,
      "eval_loss": 0.01072569377720356,
      "eval_runtime": 3.437,
      "eval_samples_per_second": 70.993,
      "eval_steps_per_second": 2.037,
      "step": 400
    },
    {
      "epoch": 1.9158878504672896,
      "grad_norm": 0.05068443093268522,
      "learning_rate": 8.650365543534168e-05,
      "loss": 0.01,
      "step": 410
    },
    {
      "epoch": 1.9626168224299065,
      "grad_norm": 0.034340227946281374,
      "learning_rate": 8.587053910931575e-05,
      "loss": 0.0093,
      "step": 420
    },
    {
      "epoch": 2.0093457943925235,
      "grad_norm": 0.035620502174738417,
      "learning_rate": 8.522534414595609e-05,
      "loss": 0.0091,
      "step": 430
    },
    {
      "epoch": 2.05607476635514,
      "grad_norm": 0.03143044172853539,
      "learning_rate": 8.456828780087598e-05,
      "loss": 0.008,
      "step": 440
    },
    {
      "epoch": 2.102803738317757,
      "grad_norm": 0.05199083018672625,
      "learning_rate": 8.38995913237554e-05,
      "loss": 0.0078,
      "step": 450
    },
    {
      "epoch": 2.149532710280374,
      "grad_norm": 0.03405283371576451,
      "learning_rate": 8.321947988384005e-05,
      "loss": 0.0089,
      "step": 460
    },
    {
      "epoch": 2.196261682242991,
      "grad_norm": 0.050263061552753986,
      "learning_rate": 8.252818249412037e-05,
      "loss": 0.0084,
      "step": 470
    },
    {
      "epoch": 2.2429906542056073,
      "grad_norm": 0.032967778518522166,
      "learning_rate": 8.182593193421625e-05,
      "loss": 0.0082,
      "step": 480
    },
    {
      "epoch": 2.289719626168224,
      "grad_norm": 0.035321987909812884,
      "learning_rate": 8.111296467199356e-05,
      "loss": 0.008,
      "step": 490
    },
    {
      "epoch": 2.336448598130841,
      "grad_norm": 0.029919394297831105,
      "learning_rate": 8.038952078393862e-05,
      "loss": 0.008,
      "step": 500
    },
    {
      "epoch": 2.383177570093458,
      "grad_norm": 0.037839397396561614,
      "learning_rate": 7.96558438743175e-05,
      "loss": 0.0081,
      "step": 510
    },
    {
      "epoch": 2.4299065420560746,
      "grad_norm": 0.03447748806162398,
      "learning_rate": 7.89121809931477e-05,
      "loss": 0.0082,
      "step": 520
    },
    {
      "epoch": 2.4766355140186915,
      "grad_norm": 0.041992049585078906,
      "learning_rate": 7.815878255300901e-05,
      "loss": 0.0082,
      "step": 530
    },
    {
      "epoch": 2.5233644859813085,
      "grad_norm": 0.034025872963113706,
      "learning_rate": 7.739590224472275e-05,
      "loss": 0.0078,
      "step": 540
    },
    {
      "epoch": 2.5700934579439254,
      "grad_norm": 0.028042564915330366,
      "learning_rate": 7.662379695192663e-05,
      "loss": 0.0078,
      "step": 550
    },
    {
      "epoch": 2.616822429906542,
      "grad_norm": 0.041319137226956335,
      "learning_rate": 7.58427266645747e-05,
      "loss": 0.0078,
      "step": 560
    },
    {
      "epoch": 2.663551401869159,
      "grad_norm": 0.03522762783198424,
      "learning_rate": 7.505295439139133e-05,
      "loss": 0.0079,
      "step": 570
    },
    {
      "epoch": 2.710280373831776,
      "grad_norm": 0.033750692975752054,
      "learning_rate": 7.425474607130858e-05,
      "loss": 0.0076,
      "step": 580
    },
    {
      "epoch": 2.7570093457943923,
      "grad_norm": 0.030318374748605468,
      "learning_rate": 7.344837048391702e-05,
      "loss": 0.0081,
      "step": 590
    },
    {
      "epoch": 2.803738317757009,
      "grad_norm": 0.0269893097675881,
      "learning_rate": 7.263409915895992e-05,
      "loss": 0.0075,
      "step": 600
    },
    {
      "epoch": 2.850467289719626,
      "grad_norm": 0.080809904610704,
      "learning_rate": 7.181220628490135e-05,
      "loss": 0.0072,
      "step": 610
    },
    {
      "epoch": 2.897196261682243,
      "grad_norm": 0.0479816994697567,
      "learning_rate": 7.098296861659925e-05,
      "loss": 0.0078,
      "step": 620
    },
    {
      "epoch": 2.94392523364486,
      "grad_norm": 0.024727139983622894,
      "learning_rate": 7.014666538211391e-05,
      "loss": 0.0075,
      "step": 630
    },
    {
      "epoch": 2.9906542056074765,
      "grad_norm": 0.035477577514644594,
      "learning_rate": 6.930357818868409e-05,
      "loss": 0.0076,
      "step": 640
    },
    {
      "epoch": 3.0373831775700935,
      "grad_norm": 0.03489962140746076,
      "learning_rate": 6.845399092790168e-05,
      "loss": 0.0067,
      "step": 650
    },
    {
      "epoch": 3.0841121495327104,
      "grad_norm": 0.028458398477187792,
      "learning_rate": 6.75981896801173e-05,
      "loss": 0.0069,
      "step": 660
    },
    {
      "epoch": 3.130841121495327,
      "grad_norm": 0.029455198773575362,
      "learning_rate": 6.673646261810897e-05,
      "loss": 0.0073,
      "step": 670
    },
    {
      "epoch": 3.177570093457944,
      "grad_norm": 0.021331692675162994,
      "learning_rate": 6.586909991004586e-05,
      "loss": 0.0063,
      "step": 680
    },
    {
      "epoch": 3.2242990654205608,
      "grad_norm": 0.030623459310690392,
      "learning_rate": 6.499639362178057e-05,
      "loss": 0.0074,
      "step": 690
    },
    {
      "epoch": 3.2710280373831777,
      "grad_norm": 0.033777185932132234,
      "learning_rate": 6.41186376185021e-05,
      "loss": 0.0073,
      "step": 700
    },
    {
      "epoch": 3.317757009345794,
      "grad_norm": 0.03878683167456107,
      "learning_rate": 6.323612746578306e-05,
      "loss": 0.0071,
      "step": 710
    },
    {
      "epoch": 3.364485981308411,
      "grad_norm": 0.0326477494007077,
      "learning_rate": 6.234916033005421e-05,
      "loss": 0.0073,
      "step": 720
    },
    {
      "epoch": 3.411214953271028,
      "grad_norm": 0.03373929782607459,
      "learning_rate": 6.145803487854015e-05,
      "loss": 0.0067,
      "step": 730
    },
    {
      "epoch": 3.457943925233645,
      "grad_norm": 0.023188946944247713,
      "learning_rate": 6.056305117868939e-05,
      "loss": 0.0065,
      "step": 740
    },
    {
      "epoch": 3.5046728971962615,
      "grad_norm": 0.023508542077636894,
      "learning_rate": 5.966451059713311e-05,
      "loss": 0.0065,
      "step": 750
    },
    {
      "epoch": 3.5514018691588785,
      "grad_norm": 0.02989633738957656,
      "learning_rate": 5.8762715698206385e-05,
      "loss": 0.0068,
      "step": 760
    },
    {
      "epoch": 3.5981308411214954,
      "grad_norm": 0.02608338564335597,
      "learning_rate": 5.78579701420661e-05,
      "loss": 0.0065,
      "step": 770
    },
    {
      "epoch": 3.6448598130841123,
      "grad_norm": 0.02843620648372604,
      "learning_rate": 5.695057858243988e-05,
      "loss": 0.0069,
      "step": 780
    },
    {
      "epoch": 3.691588785046729,
      "grad_norm": 0.026903369236640558,
      "learning_rate": 5.604084656404052e-05,
      "loss": 0.0064,
      "step": 790
    },
    {
      "epoch": 3.7383177570093458,
      "grad_norm": 0.03087535604554872,
      "learning_rate": 5.512908041968018e-05,
      "loss": 0.0068,
      "step": 800
    },
    {
      "epoch": 3.7383177570093458,
      "eval_loss": 0.007949975319206715,
      "eval_runtime": 3.3099,
      "eval_samples_per_second": 73.719,
      "eval_steps_per_second": 2.115,
      "step": 800
    },
    {
      "epoch": 3.7850467289719627,
      "grad_norm": 0.027092999369925153,
      "learning_rate": 5.4215587167119554e-05,
      "loss": 0.0066,
      "step": 810
    },
    {
      "epoch": 3.831775700934579,
      "grad_norm": 0.022172386959882197,
      "learning_rate": 5.330067440568605e-05,
      "loss": 0.0063,
      "step": 820
    },
    {
      "epoch": 3.878504672897196,
      "grad_norm": 0.02880464575204624,
      "learning_rate": 5.238465021269637e-05,
      "loss": 0.0072,
      "step": 830
    },
    {
      "epoch": 3.925233644859813,
      "grad_norm": 0.026027691897489803,
      "learning_rate": 5.1467823039718044e-05,
      "loss": 0.0068,
      "step": 840
    },
    {
      "epoch": 3.97196261682243,
      "grad_norm": 0.027074409669940073,
      "learning_rate": 5.0550501608704926e-05,
      "loss": 0.0064,
      "step": 850
    },
    {
      "epoch": 4.018691588785047,
      "grad_norm": 0.022224303104632814,
      "learning_rate": 4.9632994808041724e-05,
      "loss": 0.0066,
      "step": 860
    },
    {
      "epoch": 4.065420560747664,
      "grad_norm": 0.029720109312162384,
      "learning_rate": 4.871561158853241e-05,
      "loss": 0.0056,
      "step": 870
    },
    {
      "epoch": 4.11214953271028,
      "grad_norm": 0.021302383660758806,
      "learning_rate": 4.7798660859367615e-05,
      "loss": 0.006,
      "step": 880
    },
    {
      "epoch": 4.158878504672897,
      "grad_norm": 0.019221061294488036,
      "learning_rate": 4.688245138410612e-05,
      "loss": 0.006,
      "step": 890
    },
    {
      "epoch": 4.205607476635514,
      "grad_norm": 0.025917557422674823,
      "learning_rate": 4.5967291676705286e-05,
      "loss": 0.0063,
      "step": 900
    },
    {
      "epoch": 4.252336448598131,
      "grad_norm": 0.020256663017665554,
      "learning_rate": 4.5053489897635585e-05,
      "loss": 0.0066,
      "step": 910
    },
    {
      "epoch": 4.299065420560748,
      "grad_norm": 0.017745913007780147,
      "learning_rate": 4.414135375011416e-05,
      "loss": 0.0063,
      "step": 920
    },
    {
      "epoch": 4.345794392523365,
      "grad_norm": 0.02011370658432465,
      "learning_rate": 4.323119037649232e-05,
      "loss": 0.0057,
      "step": 930
    },
    {
      "epoch": 4.392523364485982,
      "grad_norm": 0.024844301511041878,
      "learning_rate": 4.2323306254831934e-05,
      "loss": 0.0064,
      "step": 940
    },
    {
      "epoch": 4.4392523364485985,
      "grad_norm": 0.022813937850513024,
      "learning_rate": 4.14180070957055e-05,
      "loss": 0.0067,
      "step": 950
    },
    {
      "epoch": 4.485981308411215,
      "grad_norm": 0.020230098188474713,
      "learning_rate": 4.051559773925462e-05,
      "loss": 0.0062,
      "step": 960
    },
    {
      "epoch": 4.5327102803738315,
      "grad_norm": 0.023811479597101767,
      "learning_rate": 3.961638205254161e-05,
      "loss": 0.0061,
      "step": 970
    },
    {
      "epoch": 4.579439252336448,
      "grad_norm": 0.02740260066991885,
      "learning_rate": 3.872066282722877e-05,
      "loss": 0.0064,
      "step": 980
    },
    {
      "epoch": 4.626168224299065,
      "grad_norm": 0.02317216889617009,
      "learning_rate": 3.782874167761977e-05,
      "loss": 0.006,
      "step": 990
    },
    {
      "epoch": 4.672897196261682,
      "grad_norm": 0.012363068654073208,
      "learning_rate": 3.694091893909746e-05,
      "loss": 0.0062,
      "step": 1000
    },
    {
      "epoch": 4.719626168224299,
      "grad_norm": 0.03140973721401593,
      "learning_rate": 3.605749356699235e-05,
      "loss": 0.0066,
      "step": 1010
    },
    {
      "epoch": 4.766355140186916,
      "grad_norm": 0.02856466113066483,
      "learning_rate": 3.5178763035915804e-05,
      "loss": 0.0058,
      "step": 1020
    },
    {
      "epoch": 4.813084112149532,
      "grad_norm": 0.023964636381919646,
      "learning_rate": 3.430502323959185e-05,
      "loss": 0.0061,
      "step": 1030
    },
    {
      "epoch": 4.859813084112149,
      "grad_norm": 0.025128922566591057,
      "learning_rate": 3.343656839122121e-05,
      "loss": 0.0059,
      "step": 1040
    },
    {
      "epoch": 4.906542056074766,
      "grad_norm": 0.0186144001471827,
      "learning_rate": 3.257369092441137e-05,
      "loss": 0.0064,
      "step": 1050
    },
    {
      "epoch": 4.953271028037383,
      "grad_norm": 0.024922675182496204,
      "learning_rate": 3.171668139470578e-05,
      "loss": 0.0067,
      "step": 1060
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.023268694442631153,
      "learning_rate": 3.086582838174551e-05,
      "loss": 0.0066,
      "step": 1070
    },
    {
      "epoch": 5.046728971962617,
      "grad_norm": 0.022858435420561946,
      "learning_rate": 3.0021418392096213e-05,
      "loss": 0.0056,
      "step": 1080
    },
    {
      "epoch": 5.093457943925234,
      "grad_norm": 0.03037878789593789,
      "learning_rate": 2.9183735762773124e-05,
      "loss": 0.0058,
      "step": 1090
    },
    {
      "epoch": 5.140186915887851,
      "grad_norm": 0.027831633677455617,
      "learning_rate": 2.8353062565496713e-05,
      "loss": 0.0057,
      "step": 1100
    },
    {
      "epoch": 5.186915887850467,
      "grad_norm": 0.02234327350971588,
      "learning_rate": 2.7529678511711036e-05,
      "loss": 0.0058,
      "step": 1110
    },
    {
      "epoch": 5.233644859813084,
      "grad_norm": 0.02420276983311316,
      "learning_rate": 2.671386085839682e-05,
      "loss": 0.0056,
      "step": 1120
    },
    {
      "epoch": 5.280373831775701,
      "grad_norm": 0.026279801398446302,
      "learning_rate": 2.5905884314711238e-05,
      "loss": 0.0059,
      "step": 1130
    },
    {
      "epoch": 5.327102803738318,
      "grad_norm": 0.024113471920291712,
      "learning_rate": 2.5106020949485348e-05,
      "loss": 0.0055,
      "step": 1140
    },
    {
      "epoch": 5.373831775700935,
      "grad_norm": 0.02757405539371541,
      "learning_rate": 2.4314540099610812e-05,
      "loss": 0.0064,
      "step": 1150
    },
    {
      "epoch": 5.420560747663552,
      "grad_norm": 0.023071247903649506,
      "learning_rate": 2.353170827934635e-05,
      "loss": 0.006,
      "step": 1160
    },
    {
      "epoch": 5.4672897196261685,
      "grad_norm": 0.0288737767028276,
      "learning_rate": 2.27577890905749e-05,
      "loss": 0.0058,
      "step": 1170
    },
    {
      "epoch": 5.5140186915887845,
      "grad_norm": 0.029369556474304507,
      "learning_rate": 2.1993043134041214e-05,
      "loss": 0.0058,
      "step": 1180
    },
    {
      "epoch": 5.5607476635514015,
      "grad_norm": 0.021962327780855752,
      "learning_rate": 2.1237727921600194e-05,
      "loss": 0.0062,
      "step": 1190
    },
    {
      "epoch": 5.607476635514018,
      "grad_norm": 0.027762289058911312,
      "learning_rate": 2.0492097789505178e-05,
      "loss": 0.0059,
      "step": 1200
    },
    {
      "epoch": 5.607476635514018,
      "eval_loss": 0.007473748177289963,
      "eval_runtime": 3.3345,
      "eval_samples_per_second": 73.174,
      "eval_steps_per_second": 2.099,
      "step": 1200
    },
    {
      "epoch": 5.654205607476635,
      "grad_norm": 0.02469372448375762,
      "learning_rate": 1.9756403812765763e-05,
      "loss": 0.0056,
      "step": 1210
    },
    {
      "epoch": 5.700934579439252,
      "grad_norm": 0.025899196843355222,
      "learning_rate": 1.9030893720603605e-05,
      "loss": 0.0058,
      "step": 1220
    },
    {
      "epoch": 5.747663551401869,
      "grad_norm": 0.0265685769061,
      "learning_rate": 1.831581181303489e-05,
      "loss": 0.006,
      "step": 1230
    },
    {
      "epoch": 5.794392523364486,
      "grad_norm": 0.0236407517735495,
      "learning_rate": 1.7611398878607544e-05,
      "loss": 0.0056,
      "step": 1240
    },
    {
      "epoch": 5.841121495327103,
      "grad_norm": 0.028633962957475903,
      "learning_rate": 1.6917892113320826e-05,
      "loss": 0.0056,
      "step": 1250
    },
    {
      "epoch": 5.88785046728972,
      "grad_norm": 0.025134468654909488,
      "learning_rate": 1.6235525040754667e-05,
      "loss": 0.0059,
      "step": 1260
    },
    {
      "epoch": 5.934579439252336,
      "grad_norm": 0.018750521917658348,
      "learning_rate": 1.5564527433435565e-05,
      "loss": 0.0054,
      "step": 1270
    },
    {
      "epoch": 5.981308411214953,
      "grad_norm": 0.02286869577795185,
      "learning_rate": 1.4905125235465589e-05,
      "loss": 0.0056,
      "step": 1280
    },
    {
      "epoch": 6.02803738317757,
      "grad_norm": 0.02111534299450912,
      "learning_rate": 1.4257540486440596e-05,
      "loss": 0.0052,
      "step": 1290
    },
    {
      "epoch": 6.074766355140187,
      "grad_norm": 0.020056682398156774,
      "learning_rate": 1.362199124668309e-05,
      "loss": 0.0052,
      "step": 1300
    },
    {
      "epoch": 6.121495327102804,
      "grad_norm": 0.023191711564030777,
      "learning_rate": 1.2998691523815043e-05,
      "loss": 0.0053,
      "step": 1310
    },
    {
      "epoch": 6.168224299065421,
      "grad_norm": 0.026466885727658793,
      "learning_rate": 1.23878512006955e-05,
      "loss": 0.0051,
      "step": 1320
    },
    {
      "epoch": 6.214953271028038,
      "grad_norm": 0.02292053543721861,
      "learning_rate": 1.178967596474691e-05,
      "loss": 0.0048,
      "step": 1330
    },
    {
      "epoch": 6.261682242990654,
      "grad_norm": 0.020924799561897002,
      "learning_rate": 1.1204367238694358e-05,
      "loss": 0.0056,
      "step": 1340
    },
    {
      "epoch": 6.308411214953271,
      "grad_norm": 0.02136313146143031,
      "learning_rate": 1.06321221127407e-05,
      "loss": 0.0051,
      "step": 1350
    },
    {
      "epoch": 6.355140186915888,
      "grad_norm": 0.023684372680794725,
      "learning_rate": 1.0073133278200703e-05,
      "loss": 0.0053,
      "step": 1360
    },
    {
      "epoch": 6.401869158878505,
      "grad_norm": 0.02285715840701787,
      "learning_rate": 9.527588962616352e-06,
      "loss": 0.0058,
      "step": 1370
    },
    {
      "epoch": 6.4485981308411215,
      "grad_norm": 0.026613967435669312,
      "learning_rate": 8.995672866375237e-06,
      "loss": 0.0058,
      "step": 1380
    },
    {
      "epoch": 6.4953271028037385,
      "grad_norm": 0.02754174252021364,
      "learning_rate": 8.47756410085338e-06,
      "loss": 0.0055,
      "step": 1390
    },
    {
      "epoch": 6.542056074766355,
      "grad_norm": 0.02170488478202453,
      "learning_rate": 7.973437128103306e-06,
      "loss": 0.0055,
      "step": 1400
    },
    {
      "epoch": 6.588785046728972,
      "grad_norm": 0.02730782213919483,
      "learning_rate": 7.4834617021076695e-06,
      "loss": 0.0055,
      "step": 1410
    },
    {
      "epoch": 6.635514018691588,
      "grad_norm": 0.02739226459077928,
      "learning_rate": 7.007802811618258e-06,
      "loss": 0.0056,
      "step": 1420
    },
    {
      "epoch": 6.682242990654205,
      "grad_norm": 0.021614604070826122,
      "learning_rate": 6.546620624599497e-06,
      "loss": 0.0055,
      "step": 1430
    },
    {
      "epoch": 6.728971962616822,
      "grad_norm": 0.02122813708773405,
      "learning_rate": 6.100070434295379e-06,
      "loss": 0.0055,
      "step": 1440
    },
    {
      "epoch": 6.775700934579439,
      "grad_norm": 0.023837078920304434,
      "learning_rate": 5.6683026069377535e-06,
      "loss": 0.0052,
      "step": 1450
    },
    {
      "epoch": 6.822429906542056,
      "grad_norm": 0.01936888685997085,
      "learning_rate": 5.251462531113704e-06,
      "loss": 0.0053,
      "step": 1460
    },
    {
      "epoch": 6.869158878504673,
      "grad_norm": 0.027794129758199313,
      "learning_rate": 4.84969056880904e-06,
      "loss": 0.0056,
      "step": 1470
    },
    {
      "epoch": 6.91588785046729,
      "grad_norm": 0.02333499254194896,
      "learning_rate": 4.4631220081444495e-06,
      "loss": 0.0052,
      "step": 1480
    },
    {
      "epoch": 6.962616822429906,
      "grad_norm": 0.022923436069210098,
      "learning_rate": 4.091887017820051e-06,
      "loss": 0.0056,
      "step": 1490
    },
    {
      "epoch": 7.009345794392523,
      "grad_norm": 0.0225875119677355,
      "learning_rate": 3.7361106032839264e-06,
      "loss": 0.0055,
      "step": 1500
    },
    {
      "epoch": 7.05607476635514,
      "grad_norm": 0.01938821336041444,
      "learning_rate": 3.3959125646391476e-06,
      "loss": 0.0052,
      "step": 1510
    },
    {
      "epoch": 7.102803738317757,
      "grad_norm": 0.03088082827099114,
      "learning_rate": 3.0714074563037044e-06,
      "loss": 0.0051,
      "step": 1520
    },
    {
      "epoch": 7.149532710280374,
      "grad_norm": 0.021927820725786244,
      "learning_rate": 2.7627045484367232e-06,
      "loss": 0.005,
      "step": 1530
    },
    {
      "epoch": 7.196261682242991,
      "grad_norm": 0.02690920708848648,
      "learning_rate": 2.4699077901440883e-06,
      "loss": 0.0049,
      "step": 1540
    },
    {
      "epoch": 7.242990654205608,
      "grad_norm": 0.025630634718450354,
      "learning_rate": 2.193115774475807e-06,
      "loss": 0.0048,
      "step": 1550
    },
    {
      "epoch": 7.289719626168225,
      "grad_norm": 0.025670435693965685,
      "learning_rate": 1.9324217052268835e-06,
      "loss": 0.005,
      "step": 1560
    },
    {
      "epoch": 7.336448598130841,
      "grad_norm": 0.025967263979272275,
      "learning_rate": 1.6879133655529622e-06,
      "loss": 0.0052,
      "step": 1570
    },
    {
      "epoch": 7.383177570093458,
      "grad_norm": 0.018831317858244562,
      "learning_rate": 1.4596730884112008e-06,
      "loss": 0.0052,
      "step": 1580
    },
    {
      "epoch": 7.429906542056075,
      "grad_norm": 0.025607378308549714,
      "learning_rate": 1.2477777288364355e-06,
      "loss": 0.0051,
      "step": 1590
    },
    {
      "epoch": 7.4766355140186915,
      "grad_norm": 0.025037526298953592,
      "learning_rate": 1.0522986380618605e-06,
      "loss": 0.0053,
      "step": 1600
    },
    {
      "epoch": 7.4766355140186915,
      "eval_loss": 0.007368447724729776,
      "eval_runtime": 3.3243,
      "eval_samples_per_second": 73.4,
      "eval_steps_per_second": 2.106,
      "step": 1600
    },
    {
      "epoch": 7.5233644859813085,
      "grad_norm": 0.029090444318040674,
      "learning_rate": 8.733016394930571e-07,
      "loss": 0.0049,
      "step": 1610
    },
    {
      "epoch": 7.570093457943925,
      "grad_norm": 0.02787466565009468,
      "learning_rate": 7.108470065433193e-07,
      "loss": 0.0054,
      "step": 1620
    },
    {
      "epoch": 7.616822429906542,
      "grad_norm": 0.025684874852680496,
      "learning_rate": 5.649894423379376e-07,
      "loss": 0.0053,
      "step": 1630
    },
    {
      "epoch": 7.663551401869158,
      "grad_norm": 0.02409464783376482,
      "learning_rate": 4.357780612940343e-07,
      "loss": 0.005,
      "step": 1640
    },
    {
      "epoch": 7.710280373831775,
      "grad_norm": 0.024379690680637514,
      "learning_rate": 3.232563725823645e-07,
      "loss": 0.005,
      "step": 1650
    },
    {
      "epoch": 7.757009345794392,
      "grad_norm": 0.027633151279458295,
      "learning_rate": 2.274622654765135e-07,
      "loss": 0.0052,
      "step": 1660
    },
    {
      "epoch": 7.803738317757009,
      "grad_norm": 0.0303886393499085,
      "learning_rate": 1.484279965945079e-07,
      "loss": 0.0054,
      "step": 1670
    },
    {
      "epoch": 7.850467289719626,
      "grad_norm": 0.028466293354854937,
      "learning_rate": 8.618017903708197e-08,
      "loss": 0.0053,
      "step": 1680
    },
    {
      "epoch": 7.897196261682243,
      "grad_norm": 0.021774487133470112,
      "learning_rate": 4.073977342629598e-08,
      "loss": 0.0052,
      "step": 1690
    },
    {
      "epoch": 7.94392523364486,
      "grad_norm": 0.021060893164801635,
      "learning_rate": 1.2122080847470906e-08,
      "loss": 0.0051,
      "step": 1700
    },
    {
      "epoch": 7.990654205607477,
      "grad_norm": 0.02474060027341605,
      "learning_rate": 3.367376968932412e-10,
      "loss": 0.0051,
      "step": 1710
    },
    {
      "epoch": 8.0,
      "step": 1712,
      "total_flos": 30760457011200.0,
      "train_loss": 0.01869776981195139,
      "train_runtime": 4171.5414,
      "train_samples_per_second": 22.925,
      "train_steps_per_second": 0.41
    }
  ],
  "logging_steps": 10,
  "max_steps": 1712,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 8,
  "save_steps": 3000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 30760457011200.0,
  "train_batch_size": 14,
  "trial_name": null,
  "trial_params": null
}