{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 291,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.010309278350515464,
      "grad_norm": 56.530502862669195,
      "learning_rate": 8.333333333333333e-07,
      "loss": 1.0062,
      "step": 1
    },
    {
      "epoch": 0.020618556701030927,
      "grad_norm": 56.85347935908914,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 1.0254,
      "step": 2
    },
    {
      "epoch": 0.030927835051546393,
      "grad_norm": 52.80642762797087,
      "learning_rate": 2.5e-06,
      "loss": 0.9343,
      "step": 3
    },
    {
      "epoch": 0.041237113402061855,
      "grad_norm": 14.460626748248101,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 0.7756,
      "step": 4
    },
    {
      "epoch": 0.05154639175257732,
      "grad_norm": 33.77862103842921,
      "learning_rate": 4.166666666666667e-06,
      "loss": 0.7197,
      "step": 5
    },
    {
      "epoch": 0.061855670103092786,
      "grad_norm": 19.887408911246457,
      "learning_rate": 5e-06,
      "loss": 0.6989,
      "step": 6
    },
    {
      "epoch": 0.07216494845360824,
      "grad_norm": 18.88102575457493,
      "learning_rate": 4.999650952964643e-06,
      "loss": 0.7176,
      "step": 7
    },
    {
      "epoch": 0.08247422680412371,
      "grad_norm": 14.291434565106155,
      "learning_rate": 4.998603909325636e-06,
      "loss": 0.7018,
      "step": 8
    },
    {
      "epoch": 0.09278350515463918,
      "grad_norm": 17.830491359442135,
      "learning_rate": 4.996859161456965e-06,
      "loss": 0.6979,
      "step": 9
    },
    {
      "epoch": 0.10309278350515463,
      "grad_norm": 22.258090145030344,
      "learning_rate": 4.994417196557884e-06,
      "loss": 0.6798,
      "step": 10
    },
    {
      "epoch": 0.1134020618556701,
      "grad_norm": 22.704164044100065,
      "learning_rate": 4.991278696516879e-06,
      "loss": 0.6649,
      "step": 11
    },
    {
      "epoch": 0.12371134020618557,
      "grad_norm": 18.16341564708439,
      "learning_rate": 4.98744453772126e-06,
      "loss": 0.6478,
      "step": 12
    },
    {
      "epoch": 0.13402061855670103,
      "grad_norm": 14.932166260573126,
      "learning_rate": 4.982915790812436e-06,
      "loss": 0.6316,
      "step": 13
    },
    {
      "epoch": 0.14432989690721648,
      "grad_norm": 12.635262425742601,
      "learning_rate": 4.977693720386951e-06,
      "loss": 0.6594,
      "step": 14
    },
    {
      "epoch": 0.15463917525773196,
      "grad_norm": 11.94842253292838,
      "learning_rate": 4.9717797846433655e-06,
      "loss": 0.6488,
      "step": 15
    },
    {
      "epoch": 0.16494845360824742,
      "grad_norm": 12.427372014843383,
      "learning_rate": 4.965175634975072e-06,
      "loss": 0.6507,
      "step": 16
    },
    {
      "epoch": 0.17525773195876287,
      "grad_norm": 15.721796366237712,
      "learning_rate": 4.9578831155091585e-06,
      "loss": 0.6349,
      "step": 17
    },
    {
      "epoch": 0.18556701030927836,
      "grad_norm": 11.441586957645997,
      "learning_rate": 4.949904262591467e-06,
      "loss": 0.62,
      "step": 18
    },
    {
      "epoch": 0.1958762886597938,
      "grad_norm": 17.146139866007378,
      "learning_rate": 4.941241304217962e-06,
      "loss": 0.6201,
      "step": 19
    },
    {
      "epoch": 0.20618556701030927,
      "grad_norm": 13.569191402668975,
      "learning_rate": 4.931896659412593e-06,
      "loss": 0.6181,
      "step": 20
    },
    {
      "epoch": 0.21649484536082475,
      "grad_norm": 13.738152410897145,
      "learning_rate": 4.921872937551814e-06,
      "loss": 0.6449,
      "step": 21
    },
    {
      "epoch": 0.2268041237113402,
      "grad_norm": 11.330378951412754,
      "learning_rate": 4.911172937635942e-06,
      "loss": 0.5972,
      "step": 22
    },
    {
      "epoch": 0.23711340206185566,
      "grad_norm": 17.60859685226136,
      "learning_rate": 4.899799647507577e-06,
      "loss": 0.6069,
      "step": 23
    },
    {
      "epoch": 0.24742268041237114,
      "grad_norm": 24.313662307531697,
      "learning_rate": 4.887756243017282e-06,
      "loss": 0.6588,
      "step": 24
    },
    {
      "epoch": 0.25773195876288657,
      "grad_norm": 20.364424318699296,
      "learning_rate": 4.87504608713676e-06,
      "loss": 0.6561,
      "step": 25
    },
    {
      "epoch": 0.26804123711340205,
      "grad_norm": 16.583878449086104,
      "learning_rate": 4.861672729019798e-06,
      "loss": 0.6385,
      "step": 26
    },
    {
      "epoch": 0.27835051546391754,
      "grad_norm": 13.788499236016973,
      "learning_rate": 4.847639903011196e-06,
      "loss": 0.6265,
      "step": 27
    },
    {
      "epoch": 0.28865979381443296,
      "grad_norm": 12.707772131029879,
      "learning_rate": 4.832951527604007e-06,
      "loss": 0.6332,
      "step": 28
    },
    {
      "epoch": 0.29896907216494845,
      "grad_norm": 7.810381548716006,
      "learning_rate": 4.817611704345344e-06,
      "loss": 0.6151,
      "step": 29
    },
    {
      "epoch": 0.30927835051546393,
      "grad_norm": 9.728230357128046,
      "learning_rate": 4.801624716691072e-06,
      "loss": 0.6125,
      "step": 30
    },
    {
      "epoch": 0.31958762886597936,
      "grad_norm": 10.944377720387918,
      "learning_rate": 4.784995028809707e-06,
      "loss": 0.58,
      "step": 31
    },
    {
      "epoch": 0.32989690721649484,
      "grad_norm": 12.674255975258161,
      "learning_rate": 4.767727284335852e-06,
      "loss": 0.6082,
      "step": 32
    },
    {
      "epoch": 0.3402061855670103,
      "grad_norm": 9.393558697710056,
      "learning_rate": 4.74982630507352e-06,
      "loss": 0.5954,
      "step": 33
    },
    {
      "epoch": 0.35051546391752575,
      "grad_norm": 8.86444796956786,
      "learning_rate": 4.731297089649704e-06,
      "loss": 0.6217,
      "step": 34
    },
    {
      "epoch": 0.36082474226804123,
      "grad_norm": 12.651955410449714,
      "learning_rate": 4.7121448121185716e-06,
      "loss": 0.6329,
      "step": 35
    },
    {
      "epoch": 0.3711340206185567,
      "grad_norm": 8.240685956235492,
      "learning_rate": 4.692374820516679e-06,
      "loss": 0.5939,
      "step": 36
    },
    {
      "epoch": 0.38144329896907214,
      "grad_norm": 13.17155584670435,
      "learning_rate": 4.671992635369592e-06,
      "loss": 0.6052,
      "step": 37
    },
    {
      "epoch": 0.3917525773195876,
      "grad_norm": 11.537115822592154,
      "learning_rate": 4.651003948150349e-06,
      "loss": 0.6137,
      "step": 38
    },
    {
      "epoch": 0.4020618556701031,
      "grad_norm": 8.541467382075664,
      "learning_rate": 4.62941461969019e-06,
      "loss": 0.5953,
      "step": 39
    },
    {
      "epoch": 0.41237113402061853,
      "grad_norm": 10.448612350979204,
      "learning_rate": 4.607230678541993e-06,
      "loss": 0.6003,
      "step": 40
    },
    {
      "epoch": 0.422680412371134,
      "grad_norm": 10.535341277458436,
      "learning_rate": 4.584458319296868e-06,
      "loss": 0.6091,
      "step": 41
    },
    {
      "epoch": 0.4329896907216495,
      "grad_norm": 9.278483893756524,
      "learning_rate": 4.561103900854401e-06,
      "loss": 0.6025,
      "step": 42
    },
    {
      "epoch": 0.44329896907216493,
      "grad_norm": 9.020197774744622,
      "learning_rate": 4.5371739446470085e-06,
      "loss": 0.6007,
      "step": 43
    },
    {
      "epoch": 0.4536082474226804,
      "grad_norm": 8.745226620703487,
      "learning_rate": 4.512675132818908e-06,
      "loss": 0.6172,
      "step": 44
    },
    {
      "epoch": 0.4639175257731959,
      "grad_norm": 8.731588120354488,
      "learning_rate": 4.487614306360208e-06,
      "loss": 0.5845,
      "step": 45
    },
    {
      "epoch": 0.4742268041237113,
      "grad_norm": 11.556358562184432,
      "learning_rate": 4.461998463196653e-06,
      "loss": 0.6059,
      "step": 46
    },
    {
      "epoch": 0.4845360824742268,
      "grad_norm": 5.371593726729439,
      "learning_rate": 4.435834756235534e-06,
      "loss": 0.584,
      "step": 47
    },
    {
      "epoch": 0.4948453608247423,
      "grad_norm": 8.615012315419184,
      "learning_rate": 4.409130491368331e-06,
      "loss": 0.6078,
      "step": 48
    },
    {
      "epoch": 0.5051546391752577,
      "grad_norm": 9.933496976524289,
      "learning_rate": 4.381893125430629e-06,
      "loss": 0.6279,
      "step": 49
    },
    {
      "epoch": 0.5154639175257731,
      "grad_norm": 9.416756778612212,
      "learning_rate": 4.354130264119894e-06,
      "loss": 0.6115,
      "step": 50
    },
    {
      "epoch": 0.5257731958762887,
      "grad_norm": 10.168866112438264,
      "learning_rate": 4.325849659871674e-06,
      "loss": 0.6157,
      "step": 51
    },
    {
      "epoch": 0.5360824742268041,
      "grad_norm": 7.283913317646919,
      "learning_rate": 4.297059209694824e-06,
      "loss": 0.5992,
      "step": 52
    },
    {
      "epoch": 0.5463917525773195,
      "grad_norm": 10.672694251588519,
      "learning_rate": 4.267766952966369e-06,
      "loss": 0.6183,
      "step": 53
    },
    {
      "epoch": 0.5567010309278351,
      "grad_norm": 9.525243048182714,
      "learning_rate": 4.237981069186606e-06,
      "loss": 0.6132,
      "step": 54
    },
    {
      "epoch": 0.5670103092783505,
      "grad_norm": 8.914430094168027,
      "learning_rate": 4.207709875695078e-06,
      "loss": 0.6117,
      "step": 55
    },
    {
      "epoch": 0.5773195876288659,
      "grad_norm": 9.559556695084922,
      "learning_rate": 4.176961825348059e-06,
      "loss": 0.5758,
      "step": 56
    },
    {
      "epoch": 0.5876288659793815,
      "grad_norm": 8.900834428053642,
      "learning_rate": 4.1457455041582044e-06,
      "loss": 0.6044,
      "step": 57
    },
    {
      "epoch": 0.5979381443298969,
      "grad_norm": 8.490217076773844,
      "learning_rate": 4.114069628897006e-06,
      "loss": 0.5871,
      "step": 58
    },
    {
      "epoch": 0.6082474226804123,
      "grad_norm": 10.200895711427977,
      "learning_rate": 4.081943044660746e-06,
      "loss": 0.6148,
      "step": 59
    },
    {
      "epoch": 0.6185567010309279,
      "grad_norm": 6.428485373269192,
      "learning_rate": 4.049374722400613e-06,
      "loss": 0.5836,
      "step": 60
    },
    {
      "epoch": 0.6288659793814433,
      "grad_norm": 7.53120416017883,
      "learning_rate": 4.016373756417669e-06,
      "loss": 0.5851,
      "step": 61
    },
    {
      "epoch": 0.6391752577319587,
      "grad_norm": 7.011364295097242,
      "learning_rate": 3.982949361823388e-06,
      "loss": 0.5867,
      "step": 62
    },
    {
      "epoch": 0.6494845360824743,
      "grad_norm": 6.839016326755402,
      "learning_rate": 3.949110871966444e-06,
      "loss": 0.6056,
      "step": 63
    },
    {
      "epoch": 0.6597938144329897,
      "grad_norm": 6.692603329875056,
      "learning_rate": 3.914867735826489e-06,
      "loss": 0.5795,
      "step": 64
    },
    {
      "epoch": 0.6701030927835051,
      "grad_norm": 8.96125889994131,
      "learning_rate": 3.880229515375642e-06,
      "loss": 0.601,
      "step": 65
    },
    {
      "epoch": 0.6804123711340206,
      "grad_norm": 6.438652453919476,
      "learning_rate": 3.845205882908432e-06,
      "loss": 0.5852,
      "step": 66
    },
    {
      "epoch": 0.6907216494845361,
      "grad_norm": 5.959909972976633,
      "learning_rate": 3.8098066183409223e-06,
      "loss": 0.5908,
      "step": 67
    },
    {
      "epoch": 0.7010309278350515,
      "grad_norm": 11.353215075638747,
      "learning_rate": 3.774041606479794e-06,
      "loss": 0.5967,
      "step": 68
    },
    {
      "epoch": 0.711340206185567,
      "grad_norm": 8.149197273273636,
      "learning_rate": 3.737920834262134e-06,
      "loss": 0.5736,
      "step": 69
    },
    {
      "epoch": 0.7216494845360825,
      "grad_norm": 11.946806310904744,
      "learning_rate": 3.7014543879667097e-06,
      "loss": 0.5635,
      "step": 70
    },
    {
      "epoch": 0.7319587628865979,
      "grad_norm": 7.235189107810607,
      "learning_rate": 3.6646524503974955e-06,
      "loss": 0.5988,
      "step": 71
    },
    {
      "epoch": 0.7422680412371134,
      "grad_norm": 9.00748429109896,
      "learning_rate": 3.627525298040255e-06,
      "loss": 0.6066,
      "step": 72
    },
    {
      "epoch": 0.7525773195876289,
      "grad_norm": 5.543402990170008,
      "learning_rate": 3.5900832981929574e-06,
      "loss": 0.5766,
      "step": 73
    },
    {
      "epoch": 0.7628865979381443,
      "grad_norm": 10.622044769393877,
      "learning_rate": 3.552336906070838e-06,
      "loss": 0.5984,
      "step": 74
    },
    {
      "epoch": 0.7731958762886598,
      "grad_norm": 6.322017029596627,
      "learning_rate": 3.5142966618869096e-06,
      "loss": 0.6064,
      "step": 75
    },
    {
      "epoch": 0.7835051546391752,
      "grad_norm": 8.698612269075731,
      "learning_rate": 3.4759731879087373e-06,
      "loss": 0.5691,
      "step": 76
    },
    {
      "epoch": 0.7938144329896907,
      "grad_norm": 7.1078847811435235,
      "learning_rate": 3.4373771854923032e-06,
      "loss": 0.5993,
      "step": 77
    },
    {
      "epoch": 0.8041237113402062,
      "grad_norm": 7.990782195608653,
      "learning_rate": 3.398519432093782e-06,
      "loss": 0.5884,
      "step": 78
    },
    {
      "epoch": 0.8144329896907216,
      "grad_norm": 9.607410558859467,
      "learning_rate": 3.3594107782600754e-06,
      "loss": 0.5982,
      "step": 79
    },
    {
      "epoch": 0.8247422680412371,
      "grad_norm": 14.083966363226939,
      "learning_rate": 3.3200621445989227e-06,
      "loss": 0.5839,
      "step": 80
    },
    {
      "epoch": 0.8350515463917526,
      "grad_norm": 5.701568641983366,
      "learning_rate": 3.2804845187294666e-06,
      "loss": 0.6011,
      "step": 81
    },
    {
      "epoch": 0.845360824742268,
      "grad_norm": 6.773029741907651,
      "learning_rate": 3.2406889522140854e-06,
      "loss": 0.5522,
      "step": 82
    },
    {
      "epoch": 0.8556701030927835,
      "grad_norm": 10.147092476062955,
      "learning_rate": 3.2006865574723907e-06,
      "loss": 0.5822,
      "step": 83
    },
    {
      "epoch": 0.865979381443299,
      "grad_norm": 8.873878085722918,
      "learning_rate": 3.1604885046782158e-06,
      "loss": 0.5997,
      "step": 84
    },
    {
      "epoch": 0.8762886597938144,
      "grad_norm": 9.962104711025551,
      "learning_rate": 3.1201060186404836e-06,
      "loss": 0.5698,
      "step": 85
    },
    {
      "epoch": 0.8865979381443299,
      "grad_norm": 11.526239728138046,
      "learning_rate": 3.0795503756688212e-06,
      "loss": 0.5893,
      "step": 86
    },
    {
      "epoch": 0.8969072164948454,
      "grad_norm": 13.53827694453593,
      "learning_rate": 3.038832900424784e-06,
      "loss": 0.565,
      "step": 87
    },
    {
      "epoch": 0.9072164948453608,
      "grad_norm": 7.373403085352247,
      "learning_rate": 2.9979649627595904e-06,
      "loss": 0.5865,
      "step": 88
    },
    {
      "epoch": 0.9175257731958762,
      "grad_norm": 8.327398959176447,
      "learning_rate": 2.9569579745392263e-06,
      "loss": 0.5704,
      "step": 89
    },
    {
      "epoch": 0.9278350515463918,
      "grad_norm": 7.734529867452049,
      "learning_rate": 2.9158233864578256e-06,
      "loss": 0.5702,
      "step": 90
    },
    {
      "epoch": 0.9381443298969072,
      "grad_norm": 11.545112692182581,
      "learning_rate": 2.8745726848402037e-06,
      "loss": 0.5901,
      "step": 91
    },
    {
      "epoch": 0.9484536082474226,
      "grad_norm": 8.481813159931104,
      "learning_rate": 2.8332173884344477e-06,
      "loss": 0.5799,
      "step": 92
    },
    {
      "epoch": 0.9587628865979382,
      "grad_norm": 5.62478688154214,
      "learning_rate": 2.791769045195441e-06,
      "loss": 0.5757,
      "step": 93
    },
    {
      "epoch": 0.9690721649484536,
      "grad_norm": 12.580963383160984,
      "learning_rate": 2.7502392290602463e-06,
      "loss": 0.6143,
      "step": 94
    },
    {
      "epoch": 0.979381443298969,
      "grad_norm": 8.615561365578158,
      "learning_rate": 2.708639536716225e-06,
      "loss": 0.5856,
      "step": 95
    },
    {
      "epoch": 0.9896907216494846,
      "grad_norm": 8.26963135051012,
      "learning_rate": 2.6669815843628043e-06,
      "loss": 0.584,
      "step": 96
    },
    {
      "epoch": 1.0,
      "grad_norm": 8.96198935018297,
      "learning_rate": 2.625277004467798e-06,
      "loss": 0.5692,
      "step": 97
    },
    {
      "epoch": 1.0103092783505154,
      "grad_norm": 9.223202215415496,
      "learning_rate": 2.5835374425191867e-06,
      "loss": 0.5844,
      "step": 98
    },
    {
      "epoch": 1.0206185567010309,
      "grad_norm": 9.335854961716732,
      "learning_rate": 2.5417745537732524e-06,
      "loss": 0.5834,
      "step": 99
    },
    {
      "epoch": 1.0309278350515463,
      "grad_norm": 8.859234628793157,
      "learning_rate": 2.5e-06,
      "loss": 0.5766,
      "step": 100
    },
    {
      "epoch": 1.041237113402062,
      "grad_norm": 11.10978876086468,
      "learning_rate": 2.4582254462267476e-06,
      "loss": 0.5481,
      "step": 101
    },
    {
      "epoch": 1.0515463917525774,
      "grad_norm": 14.13721579241571,
      "learning_rate": 2.4164625574808145e-06,
      "loss": 0.5391,
      "step": 102
    },
    {
      "epoch": 1.0618556701030928,
      "grad_norm": 10.171726864440725,
      "learning_rate": 2.3747229955322022e-06,
      "loss": 0.5645,
      "step": 103
    },
    {
      "epoch": 1.0721649484536082,
      "grad_norm": 8.353171526332968,
      "learning_rate": 2.333018415637196e-06,
      "loss": 0.554,
      "step": 104
    },
    {
      "epoch": 1.0824742268041236,
      "grad_norm": 5.776771584137868,
      "learning_rate": 2.291360463283776e-06,
      "loss": 0.5149,
      "step": 105
    },
    {
      "epoch": 1.0927835051546393,
      "grad_norm": 5.645891177548563,
      "learning_rate": 2.249760770939754e-06,
      "loss": 0.5341,
      "step": 106
    },
    {
      "epoch": 1.1030927835051547,
      "grad_norm": 17.194939796051738,
      "learning_rate": 2.2082309548045595e-06,
      "loss": 0.5509,
      "step": 107
    },
    {
      "epoch": 1.1134020618556701,
      "grad_norm": 11.494112580944973,
      "learning_rate": 2.1667826115655536e-06,
      "loss": 0.5619,
      "step": 108
    },
    {
      "epoch": 1.1237113402061856,
      "grad_norm": 12.344445935682346,
      "learning_rate": 2.1254273151597967e-06,
      "loss": 0.5609,
      "step": 109
    },
    {
      "epoch": 1.134020618556701,
      "grad_norm": 8.048599443563438,
      "learning_rate": 2.0841766135421753e-06,
      "loss": 0.5341,
      "step": 110
    },
    {
      "epoch": 1.1443298969072164,
      "grad_norm": 7.901613828746177,
      "learning_rate": 2.043042025460775e-06,
      "loss": 0.5673,
      "step": 111
    },
    {
      "epoch": 1.1546391752577319,
      "grad_norm": 15.559956905825056,
      "learning_rate": 2.0020350372404104e-06,
      "loss": 0.5358,
      "step": 112
    },
    {
      "epoch": 1.1649484536082475,
      "grad_norm": 8.318249542521562,
      "learning_rate": 1.9611670995752164e-06,
      "loss": 0.548,
      "step": 113
    },
    {
      "epoch": 1.175257731958763,
      "grad_norm": 7.70563136678249,
      "learning_rate": 1.920449624331179e-06,
      "loss": 0.5217,
      "step": 114
    },
    {
      "epoch": 1.1855670103092784,
      "grad_norm": 10.581055723999143,
      "learning_rate": 1.8798939813595169e-06,
      "loss": 0.536,
      "step": 115
    },
    {
      "epoch": 1.1958762886597938,
      "grad_norm": 8.42303175758336,
      "learning_rate": 1.8395114953217853e-06,
      "loss": 0.5326,
      "step": 116
    },
    {
      "epoch": 1.2061855670103092,
      "grad_norm": 8.916178847424373,
      "learning_rate": 1.7993134425276095e-06,
      "loss": 0.5423,
      "step": 117
    },
    {
      "epoch": 1.2164948453608249,
      "grad_norm": 7.749722137392225,
      "learning_rate": 1.7593110477859155e-06,
      "loss": 0.5524,
      "step": 118
    },
    {
      "epoch": 1.2268041237113403,
      "grad_norm": 6.204568152143213,
      "learning_rate": 1.7195154812705344e-06,
      "loss": 0.5162,
      "step": 119
    },
    {
      "epoch": 1.2371134020618557,
      "grad_norm": 6.79722773195719,
      "learning_rate": 1.6799378554010773e-06,
      "loss": 0.5556,
      "step": 120
    },
    {
      "epoch": 1.2474226804123711,
      "grad_norm": 7.992796754408021,
      "learning_rate": 1.640589221739926e-06,
      "loss": 0.5373,
      "step": 121
    },
    {
      "epoch": 1.2577319587628866,
      "grad_norm": 8.603985381968347,
      "learning_rate": 1.6014805679062185e-06,
      "loss": 0.514,
      "step": 122
    },
    {
      "epoch": 1.268041237113402,
      "grad_norm": 9.407620250978518,
      "learning_rate": 1.5626228145076976e-06,
      "loss": 0.5259,
      "step": 123
    },
    {
      "epoch": 1.2783505154639174,
      "grad_norm": 7.062284213120056,
      "learning_rate": 1.5240268120912631e-06,
      "loss": 0.5356,
      "step": 124
    },
    {
      "epoch": 1.2886597938144329,
      "grad_norm": 7.5572557157103475,
      "learning_rate": 1.4857033381130912e-06,
      "loss": 0.5116,
      "step": 125
    },
    {
      "epoch": 1.2989690721649485,
      "grad_norm": 7.805829671961577,
      "learning_rate": 1.4476630939291631e-06,
      "loss": 0.5565,
      "step": 126
    },
    {
      "epoch": 1.309278350515464,
      "grad_norm": 10.378458572627729,
      "learning_rate": 1.4099167018070436e-06,
      "loss": 0.5427,
      "step": 127
    },
    {
      "epoch": 1.3195876288659794,
      "grad_norm": 7.5706417929381935,
      "learning_rate": 1.372474701959745e-06,
      "loss": 0.5143,
      "step": 128
    },
    {
      "epoch": 1.3298969072164948,
      "grad_norm": 7.007827469587551,
      "learning_rate": 1.3353475496025049e-06,
      "loss": 0.559,
      "step": 129
    },
    {
      "epoch": 1.3402061855670104,
      "grad_norm": 6.710862466838978,
      "learning_rate": 1.2985456120332907e-06,
      "loss": 0.5696,
      "step": 130
    },
    {
      "epoch": 1.3505154639175259,
      "grad_norm": 9.333878728281068,
      "learning_rate": 1.2620791657378664e-06,
      "loss": 0.5554,
      "step": 131
    },
    {
      "epoch": 1.3608247422680413,
      "grad_norm": 12.936977504923266,
      "learning_rate": 1.2259583935202063e-06,
      "loss": 0.553,
      "step": 132
    },
    {
      "epoch": 1.3711340206185567,
      "grad_norm": 7.957065527893153,
      "learning_rate": 1.1901933816590787e-06,
      "loss": 0.5388,
      "step": 133
    },
    {
      "epoch": 1.3814432989690721,
      "grad_norm": 6.7484097903496,
      "learning_rate": 1.1547941170915686e-06,
      "loss": 0.5363,
      "step": 134
    },
    {
      "epoch": 1.3917525773195876,
      "grad_norm": 6.361664231123078,
      "learning_rate": 1.1197704846243587e-06,
      "loss": 0.5446,
      "step": 135
    },
    {
      "epoch": 1.402061855670103,
      "grad_norm": 8.087687103135398,
      "learning_rate": 1.0851322641735119e-06,
      "loss": 0.5431,
      "step": 136
    },
    {
      "epoch": 1.4123711340206184,
      "grad_norm": 5.308480571020336,
      "learning_rate": 1.0508891280335562e-06,
      "loss": 0.5482,
      "step": 137
    },
    {
      "epoch": 1.422680412371134,
      "grad_norm": 7.8000899969924475,
      "learning_rate": 1.0170506381766121e-06,
      "loss": 0.5712,
      "step": 138
    },
    {
      "epoch": 1.4329896907216495,
      "grad_norm": 8.473337430115116,
      "learning_rate": 9.836262435823316e-07,
      "loss": 0.5389,
      "step": 139
    },
    {
      "epoch": 1.443298969072165,
      "grad_norm": 5.576258584866546,
      "learning_rate": 9.506252775993882e-07,
      "loss": 0.5234,
      "step": 140
    },
    {
      "epoch": 1.4536082474226804,
      "grad_norm": 7.140077933091837,
      "learning_rate": 9.180569553392535e-07,
      "loss": 0.5593,
      "step": 141
    },
    {
      "epoch": 1.463917525773196,
      "grad_norm": 7.173765643870978,
      "learning_rate": 8.85930371102994e-07,
      "loss": 0.5084,
      "step": 142
    },
    {
      "epoch": 1.4742268041237114,
      "grad_norm": 7.911307786551771,
      "learning_rate": 8.542544958417962e-07,
      "loss": 0.534,
      "step": 143
    },
    {
      "epoch": 1.4845360824742269,
      "grad_norm": 6.200762166583492,
      "learning_rate": 8.23038174651942e-07,
      "loss": 0.5332,
      "step": 144
    },
    {
      "epoch": 1.4948453608247423,
      "grad_norm": 6.031208888096604,
      "learning_rate": 7.922901243049231e-07,
      "loss": 0.5348,
      "step": 145
    },
    {
      "epoch": 1.5051546391752577,
      "grad_norm": 5.448085084528034,
      "learning_rate": 7.620189308133943e-07,
      "loss": 0.5244,
      "step": 146
    },
    {
      "epoch": 1.5154639175257731,
      "grad_norm": 8.397115629983924,
      "learning_rate": 7.322330470336314e-07,
      "loss": 0.526,
      "step": 147
    },
    {
      "epoch": 1.5257731958762886,
      "grad_norm": 7.963177815905807,
      "learning_rate": 7.029407903051771e-07,
      "loss": 0.5409,
      "step": 148
    },
    {
      "epoch": 1.536082474226804,
      "grad_norm": 5.610457547002628,
      "learning_rate": 6.741503401283273e-07,
      "loss": 0.525,
      "step": 149
    },
    {
      "epoch": 1.5463917525773194,
      "grad_norm": 6.0265411811750225,
      "learning_rate": 6.458697358801061e-07,
      "loss": 0.5224,
      "step": 150
    },
    {
      "epoch": 1.556701030927835,
      "grad_norm": 8.139901449370793,
      "learning_rate": 6.181068745693716e-07,
      "loss": 0.5153,
      "step": 151
    },
    {
      "epoch": 1.5670103092783505,
      "grad_norm": 8.698508772807294,
      "learning_rate": 5.908695086316701e-07,
      "loss": 0.5391,
      "step": 152
    },
    {
      "epoch": 1.577319587628866,
      "grad_norm": 7.077929091952878,
      "learning_rate": 5.641652437644668e-07,
      "loss": 0.5123,
      "step": 153
    },
    {
      "epoch": 1.5876288659793816,
      "grad_norm": 6.897372101956063,
      "learning_rate": 5.380015368033476e-07,
      "loss": 0.5383,
      "step": 154
    },
    {
      "epoch": 1.597938144329897,
      "grad_norm": 7.544723172377018,
      "learning_rate": 5.123856936397925e-07,
      "loss": 0.5008,
      "step": 155
    },
    {
      "epoch": 1.6082474226804124,
      "grad_norm": 8.15135168556466,
      "learning_rate": 4.873248671810929e-07,
      "loss": 0.4988,
      "step": 156
    },
    {
      "epoch": 1.6185567010309279,
      "grad_norm": 9.054018879676315,
      "learning_rate": 4.628260553529917e-07,
      "loss": 0.5297,
      "step": 157
    },
    {
      "epoch": 1.6288659793814433,
      "grad_norm": 5.634019698515158,
      "learning_rate": 4.388960991455998e-07,
      "loss": 0.5355,
      "step": 158
    },
    {
      "epoch": 1.6391752577319587,
      "grad_norm": 7.717492869935791,
      "learning_rate": 4.155416807031326e-07,
      "loss": 0.4994,
      "step": 159
    },
    {
      "epoch": 1.6494845360824741,
      "grad_norm": 5.70781191187035,
      "learning_rate": 3.927693214580075e-07,
      "loss": 0.5264,
      "step": 160
    },
    {
      "epoch": 1.6597938144329896,
      "grad_norm": 6.247396918854984,
      "learning_rate": 3.7058538030980946e-07,
      "loss": 0.5238,
      "step": 161
    },
    {
      "epoch": 1.670103092783505,
      "grad_norm": 8.970736081000318,
      "learning_rate": 3.489960518496521e-07,
      "loss": 0.5176,
      "step": 162
    },
    {
      "epoch": 1.6804123711340206,
      "grad_norm": 6.52050803188651,
      "learning_rate": 3.2800736463040883e-07,
      "loss": 0.5333,
      "step": 163
    },
    {
      "epoch": 1.690721649484536,
      "grad_norm": 8.678189564713009,
      "learning_rate": 3.076251794833213e-07,
      "loss": 0.5462,
      "step": 164
    },
    {
      "epoch": 1.7010309278350515,
      "grad_norm": 7.63188885788194,
      "learning_rate": 2.878551878814287e-07,
      "loss": 0.5382,
      "step": 165
    },
    {
      "epoch": 1.7113402061855671,
      "grad_norm": 6.734689090068864,
      "learning_rate": 2.6870291035029724e-07,
      "loss": 0.546,
      "step": 166
    },
    {
      "epoch": 1.7216494845360826,
      "grad_norm": 6.992143309043317,
      "learning_rate": 2.501736949264805e-07,
      "loss": 0.5486,
      "step": 167
    },
    {
      "epoch": 1.731958762886598,
      "grad_norm": 9.076794104921877,
      "learning_rate": 2.3227271566414827e-07,
      "loss": 0.5289,
      "step": 168
    },
    {
      "epoch": 1.7422680412371134,
      "grad_norm": 7.405248131571063,
      "learning_rate": 2.1500497119029324e-07,
      "loss": 0.5419,
      "step": 169
    },
    {
      "epoch": 1.7525773195876289,
      "grad_norm": 7.261676461936444,
      "learning_rate": 1.9837528330892781e-07,
      "loss": 0.5412,
      "step": 170
    },
    {
      "epoch": 1.7628865979381443,
      "grad_norm": 6.772741086153254,
      "learning_rate": 1.823882956546566e-07,
      "loss": 0.5324,
      "step": 171
    },
    {
      "epoch": 1.7731958762886597,
      "grad_norm": 7.0347083508864285,
      "learning_rate": 1.6704847239599364e-07,
      "loss": 0.5336,
      "step": 172
    },
    {
      "epoch": 1.7835051546391751,
      "grad_norm": 7.381399190947017,
      "learning_rate": 1.5236009698880532e-07,
      "loss": 0.5235,
      "step": 173
    },
    {
      "epoch": 1.7938144329896906,
      "grad_norm": 5.087687623409436,
      "learning_rate": 1.3832727098020333e-07,
      "loss": 0.5317,
      "step": 174
    },
    {
      "epoch": 1.8041237113402062,
      "grad_norm": 6.551357146160097,
      "learning_rate": 1.2495391286323988e-07,
      "loss": 0.5555,
      "step": 175
    },
    {
      "epoch": 1.8144329896907216,
      "grad_norm": 5.892841923681646,
      "learning_rate": 1.1224375698271894e-07,
      "loss": 0.536,
      "step": 176
    },
    {
      "epoch": 1.824742268041237,
      "grad_norm": 6.6646217388808555,
      "learning_rate": 1.0020035249242304e-07,
      "loss": 0.5387,
      "step": 177
    },
    {
      "epoch": 1.8350515463917527,
      "grad_norm": 6.8242492151044365,
      "learning_rate": 8.882706236405886e-08,
      "loss": 0.4794,
      "step": 178
    },
    {
      "epoch": 1.8453608247422681,
      "grad_norm": 7.100289040713055,
      "learning_rate": 7.812706244818669e-08,
      "loss": 0.4986,
      "step": 179
    },
    {
      "epoch": 1.8556701030927836,
      "grad_norm": 5.845790241687149,
      "learning_rate": 6.810334058740736e-08,
      "loss": 0.5535,
      "step": 180
    },
    {
      "epoch": 1.865979381443299,
      "grad_norm": 5.68361589879705,
      "learning_rate": 5.8758695782038245e-08,
      "loss": 0.5437,
      "step": 181
    },
    {
      "epoch": 1.8762886597938144,
      "grad_norm": 7.994426454699525,
      "learning_rate": 5.009573740853313e-08,
      "loss": 0.5086,
      "step": 182
    },
    {
      "epoch": 1.8865979381443299,
      "grad_norm": 6.571363294762953,
      "learning_rate": 4.211688449084123e-08,
      "loss": 0.5167,
      "step": 183
    },
    {
      "epoch": 1.8969072164948453,
      "grad_norm": 8.467171947790893,
      "learning_rate": 3.4824365024928585e-08,
      "loss": 0.5244,
      "step": 184
    },
    {
      "epoch": 1.9072164948453607,
      "grad_norm": 9.804710065960087,
      "learning_rate": 2.8220215356634662e-08,
      "loss": 0.5255,
      "step": 185
    },
    {
      "epoch": 1.9175257731958761,
      "grad_norm": 7.2557000739578825,
      "learning_rate": 2.230627961304993e-08,
      "loss": 0.4976,
      "step": 186
    },
    {
      "epoch": 1.9278350515463918,
      "grad_norm": 7.499859617826814,
      "learning_rate": 1.708420918756476e-08,
      "loss": 0.515,
      "step": 187
    },
    {
      "epoch": 1.9381443298969072,
      "grad_norm": 7.626640018381011,
      "learning_rate": 1.255546227873966e-08,
      "loss": 0.4789,
      "step": 188
    },
    {
      "epoch": 1.9484536082474226,
      "grad_norm": 6.930888281586003,
      "learning_rate": 8.721303483121002e-09,
      "loss": 0.5328,
      "step": 189
    },
    {
      "epoch": 1.9587628865979383,
      "grad_norm": 6.843490525992252,
      "learning_rate": 5.582803442117091e-09,
      "loss": 0.5124,
      "step": 190
    },
    {
      "epoch": 1.9690721649484537,
      "grad_norm": 6.535875936350926,
      "learning_rate": 3.1408385430356513e-09,
      "loss": 0.5221,
      "step": 191
    },
    {
      "epoch": 1.9793814432989691,
      "grad_norm": 6.59860470212397,
      "learning_rate": 1.3960906743634706e-09,
      "loss": 0.5168,
      "step": 192
    },
    {
      "epoch": 1.9896907216494846,
      "grad_norm": 9.644943778515387,
      "learning_rate": 3.490470353573194e-10,
      "loss": 0.5166,
      "step": 193
    },
    {
      "epoch": 2.0,
      "grad_norm": 6.0885009068108324,
      "learning_rate": 0.0,
      "loss": 0.5012,
      "step": 194
    },
    {
      "epoch": 2.0103092783505154,
      "grad_norm": 6.7429744751815,
      "learning_rate": 1.2985456120332907e-06,
      "loss": 0.5071,
      "step": 195
    },
    {
      "epoch": 2.020618556701031,
      "grad_norm": 7.653675627458454,
      "learning_rate": 1.27419674050107e-06,
      "loss": 0.4717,
      "step": 196
    },
    {
      "epoch": 2.0309278350515463,
      "grad_norm": 10.177207568583976,
      "learning_rate": 1.2500000000000007e-06,
      "loss": 0.5465,
      "step": 197
    },
    {
      "epoch": 2.0412371134020617,
      "grad_norm": 5.872999784168543,
      "learning_rate": 1.2259583935202063e-06,
      "loss": 0.4681,
      "step": 198
    },
    {
      "epoch": 2.051546391752577,
      "grad_norm": 8.033564727531939,
      "learning_rate": 1.2020749047985627e-06,
      "loss": 0.4898,
      "step": 199
    },
    {
      "epoch": 2.0618556701030926,
      "grad_norm": 10.111825165552998,
      "learning_rate": 1.178352497948384e-06,
      "loss": 0.4888,
      "step": 200
    },
    {
      "epoch": 2.0721649484536084,
      "grad_norm": 9.056178341326012,
      "learning_rate": 1.1547941170915686e-06,
      "loss": 0.4933,
      "step": 201
    },
    {
      "epoch": 2.082474226804124,
      "grad_norm": 8.801087260270211,
      "learning_rate": 1.131402685993201e-06,
      "loss": 0.5084,
      "step": 202
    },
    {
      "epoch": 2.0927835051546393,
      "grad_norm": 8.35974781951926,
      "learning_rate": 1.1081811076986965e-06,
      "loss": 0.5066,
      "step": 203
    },
    {
      "epoch": 2.1030927835051547,
      "grad_norm": 11.00021188705284,
      "learning_rate": 1.0851322641735119e-06,
      "loss": 0.5176,
      "step": 204
    },
    {
      "epoch": 2.11340206185567,
      "grad_norm": 10.087911233945551,
      "learning_rate": 1.0622590159454741e-06,
      "loss": 0.4977,
      "step": 205
    },
    {
      "epoch": 2.1237113402061856,
      "grad_norm": 7.390720229714829,
      "learning_rate": 1.0395642017497648e-06,
      "loss": 0.495,
      "step": 206
    },
    {
      "epoch": 2.134020618556701,
      "grad_norm": 7.781154402180078,
      "learning_rate": 1.0170506381766121e-06,
      "loss": 0.4662,
      "step": 207
    },
    {
      "epoch": 2.1443298969072164,
      "grad_norm": 8.64835633995342,
      "learning_rate": 9.94721119321739e-07,
      "loss": 0.4963,
      "step": 208
    },
    {
      "epoch": 2.154639175257732,
      "grad_norm": 8.758238483451327,
      "learning_rate": 9.72578416439587e-07,
      "loss": 0.5021,
      "step": 209
    },
    {
      "epoch": 2.1649484536082473,
      "grad_norm": 7.794658338144042,
      "learning_rate": 9.506252775993882e-07,
      "loss": 0.4839,
      "step": 210
    },
    {
      "epoch": 2.1752577319587627,
      "grad_norm": 10.778506784155853,
      "learning_rate": 9.288644273441083e-07,
      "loss": 0.4862,
      "step": 211
    },
    {
      "epoch": 2.1855670103092786,
      "grad_norm": 10.070848117786477,
      "learning_rate": 9.072985663523137e-07,
      "loss": 0.5122,
      "step": 212
    },
    {
      "epoch": 2.195876288659794,
      "grad_norm": 7.398951801603466,
      "learning_rate": 8.85930371102994e-07,
      "loss": 0.4785,
      "step": 213
    },
    {
      "epoch": 2.2061855670103094,
      "grad_norm": 9.910836394550703,
      "learning_rate": 8.647624935433949e-07,
      "loss": 0.5005,
      "step": 214
    },
    {
      "epoch": 2.216494845360825,
      "grad_norm": 6.958374510622299,
      "learning_rate": 8.437975607598889e-07,
      "loss": 0.4773,
      "step": 215
    },
    {
      "epoch": 2.2268041237113403,
      "grad_norm": 7.528132256313958,
      "learning_rate": 8.23038174651942e-07,
      "loss": 0.5051,
      "step": 216
    },
    {
      "epoch": 2.2371134020618557,
      "grad_norm": 11.559406258405966,
      "learning_rate": 8.02486911609188e-07,
      "loss": 0.4966,
      "step": 217
    },
    {
      "epoch": 2.247422680412371,
      "grad_norm": 8.045424247257529,
      "learning_rate": 7.8214632219169e-07,
      "loss": 0.4942,
      "step": 218
    },
    {
      "epoch": 2.2577319587628866,
      "grad_norm": 9.024692147623375,
      "learning_rate": 7.620189308133943e-07,
      "loss": 0.4592,
      "step": 219
    },
    {
      "epoch": 2.268041237113402,
      "grad_norm": 8.608428072215723,
      "learning_rate": 7.421072354288303e-07,
      "loss": 0.4692,
      "step": 220
    },
    {
      "epoch": 2.2783505154639174,
      "grad_norm": 8.94979119936365,
      "learning_rate": 7.224137072230982e-07,
      "loss": 0.4988,
      "step": 221
    },
    {
      "epoch": 2.288659793814433,
      "grad_norm": 8.663269061674175,
      "learning_rate": 7.029407903051771e-07,
      "loss": 0.5092,
      "step": 222
    },
    {
      "epoch": 2.2989690721649483,
      "grad_norm": 10.165201133618265,
      "learning_rate": 6.836909014045925e-07,
      "loss": 0.5058,
      "step": 223
    },
    {
      "epoch": 2.3092783505154637,
      "grad_norm": 9.604858329399347,
      "learning_rate": 6.646664295714835e-07,
      "loss": 0.4779,
      "step": 224
    },
    {
      "epoch": 2.319587628865979,
      "grad_norm": 8.752834623859673,
      "learning_rate": 6.458697358801061e-07,
      "loss": 0.4756,
      "step": 225
    },
    {
      "epoch": 2.329896907216495,
      "grad_norm": 9.898712660194837,
      "learning_rate": 6.273031531358034e-07,
      "loss": 0.4821,
      "step": 226
    },
    {
      "epoch": 2.3402061855670104,
      "grad_norm": 10.040266601548774,
      "learning_rate": 6.089689855854869e-07,
      "loss": 0.4922,
      "step": 227
    },
    {
      "epoch": 2.350515463917526,
      "grad_norm": 8.965499873865442,
      "learning_rate": 5.908695086316701e-07,
      "loss": 0.4953,
      "step": 228
    },
    {
      "epoch": 2.3608247422680413,
      "grad_norm": 7.950737195653351,
      "learning_rate": 5.730069685500669e-07,
      "loss": 0.4983,
      "step": 229
    },
    {
      "epoch": 2.3711340206185567,
      "grad_norm": 11.903188557153433,
      "learning_rate": 5.553835822108152e-07,
      "loss": 0.4854,
      "step": 230
    },
    {
      "epoch": 2.381443298969072,
      "grad_norm": 8.455526352825133,
      "learning_rate": 5.380015368033476e-07,
      "loss": 0.49,
      "step": 231
    },
    {
      "epoch": 2.3917525773195876,
      "grad_norm": 9.999344995027917,
      "learning_rate": 5.20862989564947e-07,
      "loss": 0.4923,
      "step": 232
    },
    {
      "epoch": 2.402061855670103,
      "grad_norm": 9.5259722996865,
      "learning_rate": 5.039700675130144e-07,
      "loss": 0.4807,
      "step": 233
    },
    {
      "epoch": 2.4123711340206184,
      "grad_norm": 7.4446205225882265,
      "learning_rate": 4.873248671810929e-07,
      "loss": 0.4855,
      "step": 234
    },
    {
      "epoch": 2.422680412371134,
      "grad_norm": 8.216411301343888,
      "learning_rate": 4.7092945435867205e-07,
      "loss": 0.4598,
      "step": 235
    },
    {
      "epoch": 2.4329896907216497,
      "grad_norm": 9.208869423659547,
      "learning_rate": 4.547858638348107e-07,
      "loss": 0.5051,
      "step": 236
    },
    {
      "epoch": 2.443298969072165,
      "grad_norm": 9.70808242233489,
      "learning_rate": 4.388960991455998e-07,
      "loss": 0.4822,
      "step": 237
    },
    {
      "epoch": 2.4536082474226806,
      "grad_norm": 9.645304874804616,
      "learning_rate": 4.23262132325514e-07,
      "loss": 0.461,
      "step": 238
    },
    {
      "epoch": 2.463917525773196,
      "grad_norm": 9.228997796596868,
      "learning_rate": 4.0788590366266754e-07,
      "loss": 0.4737,
      "step": 239
    },
    {
      "epoch": 2.4742268041237114,
      "grad_norm": 7.779464578125827,
      "learning_rate": 3.927693214580075e-07,
      "loss": 0.4784,
      "step": 240
    },
    {
      "epoch": 2.484536082474227,
      "grad_norm": 7.548505713522384,
      "learning_rate": 3.7791426178848235e-07,
      "loss": 0.4996,
      "step": 241
    },
    {
      "epoch": 2.4948453608247423,
      "grad_norm": 7.480983276201638,
      "learning_rate": 3.633225682742053e-07,
      "loss": 0.452,
      "step": 242
    },
    {
      "epoch": 2.5051546391752577,
      "grad_norm": 9.60264825850974,
      "learning_rate": 3.489960518496521e-07,
      "loss": 0.4875,
      "step": 243
    },
    {
      "epoch": 2.515463917525773,
      "grad_norm": 8.226938092387504,
      "learning_rate": 3.3493649053890325e-07,
      "loss": 0.4689,
      "step": 244
    },
    {
      "epoch": 2.5257731958762886,
      "grad_norm": 10.802468293312753,
      "learning_rate": 3.2114562923498765e-07,
      "loss": 0.4757,
      "step": 245
    },
    {
      "epoch": 2.536082474226804,
      "grad_norm": 9.606058084808302,
      "learning_rate": 3.076251794833213e-07,
      "loss": 0.4963,
      "step": 246
    },
    {
      "epoch": 2.5463917525773194,
      "grad_norm": 9.436856481578294,
      "learning_rate": 2.9437681926929583e-07,
      "loss": 0.4452,
      "step": 247
    },
    {
      "epoch": 2.556701030927835,
      "grad_norm": 9.725834168890692,
      "learning_rate": 2.8140219281002717e-07,
      "loss": 0.4968,
      "step": 248
    },
    {
      "epoch": 2.5670103092783503,
      "grad_norm": 9.488327886875917,
      "learning_rate": 2.6870291035029724e-07,
      "loss": 0.4682,
      "step": 249
    },
    {
      "epoch": 2.5773195876288657,
      "grad_norm": 9.82180209155548,
      "learning_rate": 2.5628054796271064e-07,
      "loss": 0.4688,
      "step": 250
    },
    {
      "epoch": 2.5876288659793816,
      "grad_norm": 9.093873747412966,
      "learning_rate": 2.441366473520909e-07,
      "loss": 0.4852,
      "step": 251
    },
    {
      "epoch": 2.597938144329897,
      "grad_norm": 7.449464038797156,
      "learning_rate": 2.3227271566414827e-07,
      "loss": 0.4805,
      "step": 252
    },
    {
      "epoch": 2.6082474226804124,
      "grad_norm": 10.772309684780893,
      "learning_rate": 2.2069022529842664e-07,
      "loss": 0.4902,
      "step": 253
    },
    {
      "epoch": 2.618556701030928,
      "grad_norm": 10.691112242486527,
      "learning_rate": 2.0939061372557136e-07,
      "loss": 0.5194,
      "step": 254
    },
    {
      "epoch": 2.6288659793814433,
      "grad_norm": 10.964675015957674,
      "learning_rate": 1.9837528330892781e-07,
      "loss": 0.4933,
      "step": 255
    },
    {
      "epoch": 2.6391752577319587,
      "grad_norm": 7.146996257972618,
      "learning_rate": 1.876456011305003e-07,
      "loss": 0.4736,
      "step": 256
    },
    {
      "epoch": 2.649484536082474,
      "grad_norm": 8.527751722375397,
      "learning_rate": 1.7720289882128095e-07,
      "loss": 0.4789,
      "step": 257
    },
    {
      "epoch": 2.6597938144329896,
      "grad_norm": 9.492478053037697,
      "learning_rate": 1.6704847239599364e-07,
      "loss": 0.4701,
      "step": 258
    },
    {
      "epoch": 2.670103092783505,
      "grad_norm": 10.196087733936238,
      "learning_rate": 1.5718358209224155e-07,
      "loss": 0.4562,
      "step": 259
    },
    {
      "epoch": 2.680412371134021,
      "grad_norm": 10.20078352368436,
      "learning_rate": 1.4760945221410638e-07,
      "loss": 0.499,
      "step": 260
    },
    {
      "epoch": 2.6907216494845363,
      "grad_norm": 9.305265548008038,
      "learning_rate": 1.3832727098020333e-07,
      "loss": 0.4913,
      "step": 261
    },
    {
      "epoch": 2.7010309278350517,
      "grad_norm": 8.908621703126363,
      "learning_rate": 1.2933819037621227e-07,
      "loss": 0.4843,
      "step": 262
    },
    {
      "epoch": 2.711340206185567,
      "grad_norm": 9.697082505509188,
      "learning_rate": 1.2064332601191163e-07,
      "loss": 0.4572,
      "step": 263
    },
    {
      "epoch": 2.7216494845360826,
      "grad_norm": 7.970433913687569,
      "learning_rate": 1.1224375698271894e-07,
      "loss": 0.4756,
      "step": 264
    },
    {
      "epoch": 2.731958762886598,
      "grad_norm": 8.87972810552369,
      "learning_rate": 1.0414052573577137e-07,
      "loss": 0.459,
      "step": 265
    },
    {
      "epoch": 2.7422680412371134,
      "grad_norm": 8.46520426378118,
      "learning_rate": 9.633463794054776e-08,
      "loss": 0.4698,
      "step": 266
    },
    {
      "epoch": 2.752577319587629,
      "grad_norm": 8.323482285728723,
      "learning_rate": 8.882706236405886e-08,
      "loss": 0.4617,
      "step": 267
    },
    {
      "epoch": 2.7628865979381443,
      "grad_norm": 9.704235560062203,
      "learning_rate": 8.1618730750615e-08,
      "loss": 0.4627,
      "step": 268
    },
    {
      "epoch": 2.7731958762886597,
      "grad_norm": 8.52883239697346,
      "learning_rate": 7.471053770619352e-08,
      "loss": 0.4473,
      "step": 269
    },
    {
      "epoch": 2.783505154639175,
      "grad_norm": 9.049891046532084,
      "learning_rate": 6.810334058740736e-08,
      "loss": 0.512,
      "step": 270
    },
    {
      "epoch": 2.7938144329896906,
      "grad_norm": 9.351205154513341,
      "learning_rate": 6.179795939510264e-08,
      "loss": 0.4642,
      "step": 271
    },
    {
      "epoch": 2.804123711340206,
      "grad_norm": 9.158307466016296,
      "learning_rate": 5.5795176672591714e-08,
      "loss": 0.4471,
      "step": 272
    },
    {
      "epoch": 2.8144329896907214,
      "grad_norm": 7.714445945044509,
      "learning_rate": 5.009573740853313e-08,
      "loss": 0.4613,
      "step": 273
    },
    {
      "epoch": 2.824742268041237,
      "grad_norm": 10.072671050292586,
      "learning_rate": 4.4700348944471946e-08,
      "loss": 0.4608,
      "step": 274
    },
    {
      "epoch": 2.8350515463917527,
      "grad_norm": 9.221620067665079,
      "learning_rate": 3.9609680887055247e-08,
      "loss": 0.4601,
      "step": 275
    },
    {
      "epoch": 2.845360824742268,
      "grad_norm": 9.452260581603698,
      "learning_rate": 3.4824365024928585e-08,
      "loss": 0.467,
      "step": 276
    },
    {
      "epoch": 2.8556701030927836,
      "grad_norm": 10.946772367065448,
      "learning_rate": 3.034499525032625e-08,
      "loss": 0.4567,
      "step": 277
    },
    {
      "epoch": 2.865979381443299,
      "grad_norm": 8.94754765333994,
      "learning_rate": 2.6172127485364905e-08,
      "loss": 0.4738,
      "step": 278
    },
    {
      "epoch": 2.8762886597938144,
      "grad_norm": 8.822797237760415,
      "learning_rate": 2.230627961304993e-08,
      "loss": 0.4521,
      "step": 279
    },
    {
      "epoch": 2.88659793814433,
      "grad_norm": 8.953604746908766,
      "learning_rate": 1.8747931413001795e-08,
      "loss": 0.4849,
      "step": 280
    },
    {
      "epoch": 2.8969072164948453,
      "grad_norm": 8.1573186298565,
      "learning_rate": 1.5497524501913163e-08,
      "loss": 0.4743,
      "step": 281
    },
    {
      "epoch": 2.9072164948453607,
      "grad_norm": 7.678394662843834,
      "learning_rate": 1.255546227873966e-08,
      "loss": 0.4394,
      "step": 282
    },
    {
      "epoch": 2.917525773195876,
      "grad_norm": 10.108055724388532,
      "learning_rate": 9.922109874636875e-09,
      "loss": 0.4715,
      "step": 283
    },
    {
      "epoch": 2.927835051546392,
      "grad_norm": 8.599838042397586,
      "learning_rate": 7.597794107641887e-09,
      "loss": 0.4954,
      "step": 284
    },
    {
      "epoch": 2.9381443298969074,
      "grad_norm": 9.481046139581798,
      "learning_rate": 5.582803442117091e-09,
      "loss": 0.4657,
      "step": 285
    },
    {
      "epoch": 2.948453608247423,
      "grad_norm": 9.453713340420872,
      "learning_rate": 3.877387952945788e-09,
      "loss": 0.4788,
      "step": 286
    },
    {
      "epoch": 2.9587628865979383,
      "grad_norm": 8.763446611978422,
      "learning_rate": 2.4817592944983983e-09,
      "loss": 0.4363,
      "step": 287
    },
    {
      "epoch": 2.9690721649484537,
      "grad_norm": 9.418576702303165,
      "learning_rate": 1.3960906743634706e-09,
      "loss": 0.4728,
      "step": 288
    },
    {
      "epoch": 2.979381443298969,
      "grad_norm": 7.824055175290262,
      "learning_rate": 6.205168318523802e-10,
      "loss": 0.4648,
      "step": 289
    },
    {
      "epoch": 2.9896907216494846,
      "grad_norm": 10.036077270769853,
      "learning_rate": 1.5513402127603772e-10,
      "loss": 0.4502,
      "step": 290
    },
    {
      "epoch": 3.0,
      "grad_norm": 9.520420413311138,
      "learning_rate": 0.0,
      "loss": 0.4383,
      "step": 291
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 291,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 5000.0,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 15989412249600.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}