{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1661,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0006020469596628537, "grad_norm": 27.23306162664463, "learning_rate": 5.98802395209581e-08, "loss": 1.971, "step": 1},
    {"epoch": 0.0030102347983142685, "grad_norm": 13.650791380879662, "learning_rate": 2.9940119760479047e-07, "loss": 1.9968, "step": 5},
    {"epoch": 0.006020469596628537, "grad_norm": 12.306747766476132, "learning_rate": 5.988023952095809e-07, "loss": 1.9812, "step": 10},
    {"epoch": 0.009030704394942806, "grad_norm": 9.02007086489199, "learning_rate": 8.982035928143713e-07, "loss": 1.9655, "step": 15},
    {"epoch": 0.012040939193257074, "grad_norm": 7.401304214197773, "learning_rate": 1.1976047904191619e-06, "loss": 1.9352, "step": 20},
    {"epoch": 0.015051173991571343, "grad_norm": 10.329535150502213, "learning_rate": 1.4970059880239521e-06, "loss": 1.892, "step": 25},
    {"epoch": 0.018061408789885613, "grad_norm": 3.0948017863977086, "learning_rate": 1.7964071856287426e-06, "loss": 1.8184, "step": 30},
    {"epoch": 0.02107164358819988, "grad_norm": 3.6935226911228827, "learning_rate": 2.095808383233533e-06, "loss": 1.7491, "step": 35},
    {"epoch": 0.024081878386514148, "grad_norm": 6.79455681089959, "learning_rate": 2.3952095808383237e-06, "loss": 1.7084, "step": 40},
    {"epoch": 0.027092113184828417, "grad_norm": 1.7513542710333883, "learning_rate": 2.694610778443114e-06, "loss": 1.6927, "step": 45},
    {"epoch": 0.030102347983142687, "grad_norm": 5.378774459518788, "learning_rate": 2.9940119760479042e-06, "loss": 1.6141, "step": 50},
    {"epoch": 0.033112582781456956, "grad_norm": 1.7236309463564579, "learning_rate": 3.2934131736526947e-06, "loss": 1.5602, "step": 55},
    {"epoch": 0.036122817579771226, "grad_norm": 4.263330975488616, "learning_rate": 3.592814371257485e-06, "loss": 1.5176, "step": 60},
    {"epoch": 0.03913305237808549, "grad_norm": 3.2624764680298513, "learning_rate": 3.892215568862276e-06, "loss": 1.4552, "step": 65},
    {"epoch": 0.04214328717639976, "grad_norm": 1.114188393507732, "learning_rate": 4.191616766467066e-06, "loss": 1.4621, "step": 70},
    {"epoch": 0.04515352197471403, "grad_norm": 18.7316085941672, "learning_rate": 4.4910179640718566e-06, "loss": 1.4325, "step": 75},
    {"epoch": 0.048163756773028296, "grad_norm": 0.9868351913311059, "learning_rate": 4.7904191616766475e-06, "loss": 1.3936, "step": 80},
    {"epoch": 0.051173991571342566, "grad_norm": 1.7253367860105293, "learning_rate": 5.0898203592814375e-06, "loss": 1.4142, "step": 85},
    {"epoch": 0.054184226369656835, "grad_norm": 0.9242524570526937, "learning_rate": 5.389221556886228e-06, "loss": 1.3915, "step": 90},
    {"epoch": 0.057194461167971104, "grad_norm": 0.9116834724175251, "learning_rate": 5.6886227544910184e-06, "loss": 1.3879, "step": 95},
    {"epoch": 0.060204695966285374, "grad_norm": 0.9760286306029387, "learning_rate": 5.9880239520958085e-06, "loss": 1.3615, "step": 100},
    {"epoch": 0.06321493076459964, "grad_norm": 0.9533636268140245, "learning_rate": 6.2874251497005985e-06, "loss": 1.3516, "step": 105},
    {"epoch": 0.06622516556291391, "grad_norm": 0.9318286267990689, "learning_rate": 6.586826347305389e-06, "loss": 1.3374, "step": 110},
    {"epoch": 0.06923540036122817, "grad_norm": 0.9861214247946927, "learning_rate": 6.88622754491018e-06, "loss": 1.3381, "step": 115},
    {"epoch": 0.07224563515954245, "grad_norm": 1.0087362740253691, "learning_rate": 7.18562874251497e-06, "loss": 1.3287, "step": 120},
    {"epoch": 0.07525586995785671, "grad_norm": 3.800322225138513, "learning_rate": 7.485029940119761e-06, "loss": 1.3317, "step": 125},
    {"epoch": 0.07826610475617098, "grad_norm": 3.4474877346813013, "learning_rate": 7.784431137724551e-06, "loss": 1.3036, "step": 130},
    {"epoch": 0.08127633955448525, "grad_norm": 0.9416140469616731, "learning_rate": 8.083832335329342e-06, "loss": 1.3208, "step": 135},
    {"epoch": 0.08428657435279951, "grad_norm": 0.930813958919029, "learning_rate": 8.383233532934131e-06, "loss": 1.2844, "step": 140},
    {"epoch": 0.08729680915111379, "grad_norm": 0.9070530897870428, "learning_rate": 8.682634730538922e-06, "loss": 1.2861, "step": 145},
    {"epoch": 0.09030704394942805, "grad_norm": 1.0007264664595041, "learning_rate": 8.982035928143713e-06, "loss": 1.2707, "step": 150},
    {"epoch": 0.09331727874774233, "grad_norm": 0.9553156573254931, "learning_rate": 9.281437125748504e-06, "loss": 1.3042, "step": 155},
    {"epoch": 0.09632751354605659, "grad_norm": 0.957996113896581, "learning_rate": 9.580838323353295e-06, "loss": 1.2973, "step": 160},
    {"epoch": 0.09933774834437085, "grad_norm": 0.9845982064473081, "learning_rate": 9.880239520958084e-06, "loss": 1.2698, "step": 165},
    {"epoch": 0.10234798314268513, "grad_norm": 0.9872519714050891, "learning_rate": 9.999900509954779e-06, "loss": 1.2678, "step": 170},
    {"epoch": 0.1053582179409994, "grad_norm": 1.0192244610172112, "learning_rate": 9.999292529572152e-06, "loss": 1.2685, "step": 175},
    {"epoch": 0.10836845273931367, "grad_norm": 0.9727786532787158, "learning_rate": 9.998131908181262e-06, "loss": 1.2697, "step": 180},
    {"epoch": 0.11137868753762793, "grad_norm": 0.961370749302959, "learning_rate": 9.996418774081658e-06, "loss": 1.2525, "step": 185},
    {"epoch": 0.11438892233594221, "grad_norm": 1.0654272615306832, "learning_rate": 9.994153316649769e-06, "loss": 1.2731, "step": 190},
    {"epoch": 0.11739915713425647, "grad_norm": 0.9755470408109456, "learning_rate": 9.991335786317964e-06, "loss": 1.2556, "step": 195},
    {"epoch": 0.12040939193257075, "grad_norm": 1.0036625167312827, "learning_rate": 9.987966494546873e-06, "loss": 1.2258, "step": 200},
    {"epoch": 0.12341962673088501, "grad_norm": 1.003865146067013, "learning_rate": 9.984045813790959e-06, "loss": 1.2469, "step": 205},
    {"epoch": 0.12642986152919927, "grad_norm": 0.9786952100452682, "learning_rate": 9.979574177457337e-06, "loss": 1.2468, "step": 210},
    {"epoch": 0.12944009632751355, "grad_norm": 0.9740050481925707, "learning_rate": 9.974552079857873e-06, "loss": 1.2427, "step": 215},
    {"epoch": 0.13245033112582782, "grad_norm": 0.9630667645499639, "learning_rate": 9.968980076154533e-06, "loss": 1.2521, "step": 220},
    {"epoch": 0.13546056592414207, "grad_norm": 0.9815376328795034, "learning_rate": 9.962858782298023e-06, "loss": 1.232, "step": 225},
    {"epoch": 0.13847080072245635, "grad_norm": 1.1558477232615034, "learning_rate": 9.956188874959686e-06, "loss": 1.2306, "step": 230},
    {"epoch": 0.14148103552077063, "grad_norm": 1.010662989571516, "learning_rate": 9.948971091456715e-06, "loss": 1.2316, "step": 235},
    {"epoch": 0.1444912703190849, "grad_norm": 1.016349108965321, "learning_rate": 9.941206229670634e-06, "loss": 1.2202, "step": 240},
    {"epoch": 0.14750150511739915, "grad_norm": 1.012113158589424, "learning_rate": 9.932895147959106e-06, "loss": 1.2042, "step": 245},
    {"epoch": 0.15051173991571343, "grad_norm": 1.0179802871184889, "learning_rate": 9.924038765061042e-06, "loss": 1.1872, "step": 250},
    {"epoch": 0.1535219747140277, "grad_norm": 1.0631543044499743, "learning_rate": 9.91463805999504e-06, "loss": 1.2117, "step": 255},
    {"epoch": 0.15653220951234195, "grad_norm": 1.0508741168922897, "learning_rate": 9.904694071951167e-06, "loss": 1.2122, "step": 260},
    {"epoch": 0.15954244431065623, "grad_norm": 1.0059019047260795, "learning_rate": 9.894207900176074e-06, "loss": 1.1955, "step": 265},
    {"epoch": 0.1625526791089705, "grad_norm": 1.0033418286179807, "learning_rate": 9.883180703851488e-06, "loss": 1.2012, "step": 270},
    {"epoch": 0.16556291390728478, "grad_norm": 0.9877620622300878, "learning_rate": 9.871613701966067e-06, "loss": 1.1805, "step": 275},
    {"epoch": 0.16857314870559903, "grad_norm": 0.9400638251546493, "learning_rate": 9.859508173180653e-06, "loss": 1.1719, "step": 280},
    {"epoch": 0.1715833835039133, "grad_norm": 0.9874311482394339, "learning_rate": 9.846865455686915e-06, "loss": 1.1755, "step": 285},
    {"epoch": 0.17459361830222758, "grad_norm": 0.9914705153634205, "learning_rate": 9.833686947059436e-06, "loss": 1.166, "step": 290},
    {"epoch": 0.17760385310054183, "grad_norm": 1.023017013404828, "learning_rate": 9.819974104101198e-06, "loss": 1.1611, "step": 295},
    {"epoch": 0.1806140878988561, "grad_norm": 0.9792763961764498, "learning_rate": 9.80572844268256e-06, "loss": 1.1493, "step": 300},
    {"epoch": 0.18362432269717038, "grad_norm": 1.0359591597037352, "learning_rate": 9.790951537573686e-06, "loss": 1.1832, "step": 305},
    {"epoch": 0.18663455749548466, "grad_norm": 1.453908493308493, "learning_rate": 9.775645022270448e-06, "loss": 1.1641, "step": 310},
    {"epoch": 0.1896447922937989, "grad_norm": 0.9821000141264724, "learning_rate": 9.759810588813872e-06, "loss": 1.1648, "step": 315},
    {"epoch": 0.19265502709211318, "grad_norm": 1.1094696031738442, "learning_rate": 9.743449987603082e-06, "loss": 1.1602, "step": 320},
    {"epoch": 0.19566526189042746, "grad_norm": 1.0229747825382478, "learning_rate": 9.726565027201813e-06, "loss": 1.1732, "step": 325},
    {"epoch": 0.1986754966887417, "grad_norm": 1.0330731927842094, "learning_rate": 9.70915757413847e-06, "loss": 1.1613, "step": 330},
    {"epoch": 0.20168573148705599, "grad_norm": 1.0671526485809508, "learning_rate": 9.691229552699817e-06, "loss": 1.1404, "step": 335},
    {"epoch": 0.20469596628537026, "grad_norm": 1.1599170579317017, "learning_rate": 9.672782944718234e-06, "loss": 1.1652, "step": 340},
    {"epoch": 0.20770620108368454, "grad_norm": 1.078143063135002, "learning_rate": 9.65381978935266e-06, "loss": 1.1393, "step": 345},
    {"epoch": 0.2107164358819988, "grad_norm": 1.051353820041791, "learning_rate": 9.634342182863163e-06, "loss": 1.1159, "step": 350},
    {"epoch": 0.21372667068031306, "grad_norm": 1.0456430688838831, "learning_rate": 9.614352278379217e-06, "loss": 1.149, "step": 355},
    {"epoch": 0.21673690547862734, "grad_norm": 1.091953950241735, "learning_rate": 9.593852285661684e-06, "loss": 1.1552, "step": 360},
    {"epoch": 0.2197471402769416, "grad_norm": 1.015428888405563, "learning_rate": 9.572844470858537e-06, "loss": 1.1369, "step": 365},
    {"epoch": 0.22275737507525586, "grad_norm": 1.1341410310607158, "learning_rate": 9.551331156254358e-06, "loss": 1.138, "step": 370},
    {"epoch": 0.22576760987357014, "grad_norm": 1.0098698732632645, "learning_rate": 9.529314720013618e-06, "loss": 1.1249, "step": 375},
    {"epoch": 0.22877784467188442, "grad_norm": 1.0306055734848716, "learning_rate": 9.506797595917787e-06, "loss": 1.1309, "step": 380},
    {"epoch": 0.23178807947019867, "grad_norm": 1.0832575075897741, "learning_rate": 9.483782273096295e-06, "loss": 1.1296, "step": 385},
    {"epoch": 0.23479831426851294, "grad_norm": 1.0215735515542592, "learning_rate": 9.460271295751373e-06, "loss": 1.1436, "step": 390},
    {"epoch": 0.23780854906682722, "grad_norm": 1.0127022199777589, "learning_rate": 9.436267262876808e-06, "loss": 1.1202, "step": 395},
    {"epoch": 0.2408187838651415, "grad_norm": 1.0677989226321623, "learning_rate": 9.411772827970642e-06, "loss": 1.1182, "step": 400},
    {"epoch": 0.24382901866345574, "grad_norm": 1.0863425275828655, "learning_rate": 9.38679069874184e-06, "loss": 1.1192, "step": 405},
    {"epoch": 0.24683925346177002, "grad_norm": 1.003581452419569, "learning_rate": 9.36132363681097e-06, "loss": 1.1195, "step": 410},
    {"epoch": 0.2498494882600843, "grad_norm": 1.0173137664236545, "learning_rate": 9.335374457404928e-06, "loss": 1.127, "step": 415},
    {"epoch": 0.25285972305839854, "grad_norm": 1.0256692516120043, "learning_rate": 9.308946029045726e-06, "loss": 1.1002, "step": 420},
    {"epoch": 0.25586995785671285, "grad_norm": 1.1681651650487053, "learning_rate": 9.282041273233402e-06, "loss": 1.0926, "step": 425},
    {"epoch": 0.2588801926550271, "grad_norm": 1.046473420004503, "learning_rate": 9.254663164123052e-06, "loss": 1.0937, "step": 430},
    {"epoch": 0.26189042745334135, "grad_norm": 1.0077718278045176, "learning_rate": 9.226814728196072e-06, "loss": 1.096, "step": 435},
    {"epoch": 0.26490066225165565, "grad_norm": 1.003746070586757, "learning_rate": 9.198499043925591e-06, "loss": 1.1047, "step": 440},
    {"epoch": 0.2679108970499699, "grad_norm": 1.007686883808698, "learning_rate": 9.169719241436162e-06, "loss": 1.0996, "step": 445},
    {"epoch": 0.27092113184828415, "grad_norm": 1.002375870866833, "learning_rate": 9.14047850215775e-06, "loss": 1.082, "step": 450},
    {"epoch": 0.27393136664659845, "grad_norm": 1.0070740266122291, "learning_rate": 9.110780058474052e-06, "loss": 1.0766, "step": 455},
    {"epoch": 0.2769416014449127, "grad_norm": 1.030435067694461, "learning_rate": 9.080627193365155e-06, "loss": 1.0948, "step": 460},
    {"epoch": 0.27995183624322695, "grad_norm": 0.9984477878066329, "learning_rate": 9.050023240044649e-06, "loss": 1.0944, "step": 465},
    {"epoch": 0.28296207104154125, "grad_norm": 0.9909219592815518, "learning_rate": 9.018971581591141e-06, "loss": 1.0817, "step": 470},
    {"epoch": 0.2859723058398555, "grad_norm": 0.9836671240022327, "learning_rate": 8.987475650574289e-06, "loss": 1.1055, "step": 475},
    {"epoch": 0.2889825406381698, "grad_norm": 1.052915518162781, "learning_rate": 8.955538928675343e-06, "loss": 1.0675, "step": 480},
    {"epoch": 0.29199277543648405, "grad_norm": 1.0046123233442037, "learning_rate": 8.923164946302274e-06, "loss": 1.0846, "step": 485},
    {"epoch": 0.2950030102347983, "grad_norm": 1.0927507536443555, "learning_rate": 8.890357282199504e-06, "loss": 1.0938, "step": 490},
    {"epoch": 0.2980132450331126, "grad_norm": 1.0365967534347835, "learning_rate": 8.857119563052301e-06, "loss": 1.0638, "step": 495},
    {"epoch": 0.30102347983142685, "grad_norm": 0.9900367247286117, "learning_rate": 8.823455463085873e-06, "loss": 1.0784, "step": 500},
    {"epoch": 0.3040337146297411, "grad_norm": 1.0187836052415793, "learning_rate": 8.789368703659199e-06, "loss": 1.086, "step": 505},
    {"epoch": 0.3070439494280554, "grad_norm": 1.4170045215112093, "learning_rate": 8.754863052853658e-06, "loss": 1.0786, "step": 510},
    {"epoch": 0.31005418422636966, "grad_norm": 1.0563490229181656, "learning_rate": 8.719942325056496e-06, "loss": 1.0552, "step": 515},
    {"epoch": 0.3130644190246839, "grad_norm": 0.9581250571752027, "learning_rate": 8.68461038053916e-06, "loss": 1.0611, "step": 520},
    {"epoch": 0.3160746538229982, "grad_norm": 1.0882591426901487, "learning_rate": 8.648871125030576e-06, "loss": 1.0674, "step": 525},
    {"epoch": 0.31908488862131246, "grad_norm": 0.9748747047047535, "learning_rate": 8.612728509285395e-06, "loss": 1.0569, "step": 530},
    {"epoch": 0.3220951234196267, "grad_norm": 1.0007083161058548, "learning_rate": 8.576186528647253e-06, "loss": 1.0773, "step": 535},
    {"epoch": 0.325105358217941, "grad_norm": 1.0024226084870622, "learning_rate": 8.53924922260712e-06, "loss": 1.0688, "step": 540},
    {"epoch": 0.32811559301625526, "grad_norm": 1.0647162244790471, "learning_rate": 8.501920674356755e-06, "loss": 1.0471, "step": 545},
    {"epoch": 0.33112582781456956, "grad_norm": 0.993432183224602, "learning_rate": 8.46420501033733e-06, "loss": 1.0615, "step": 550},
    {"epoch": 0.3341360626128838, "grad_norm": 1.0547538639367902, "learning_rate": 8.42610639978329e-06, "loss": 1.0568, "step": 555},
    {"epoch": 0.33714629741119806, "grad_norm": 0.9966572739245336, "learning_rate": 8.387629054261454e-06, "loss": 1.0523, "step": 560},
    {"epoch": 0.34015653220951236, "grad_norm": 0.9680858165661955, "learning_rate": 8.348777227205462e-06, "loss": 1.0499, "step": 565},
    {"epoch": 0.3431667670078266, "grad_norm": 0.9422881419321228, "learning_rate": 8.309555213445583e-06, "loss": 1.0745, "step": 570},
    {"epoch": 0.34617700180614086, "grad_norm": 0.9684939972353577, "learning_rate": 8.269967348733947e-06, "loss": 1.046, "step": 575},
    {"epoch": 0.34918723660445516, "grad_norm": 0.9621359710421508, "learning_rate": 8.230018009265255e-06, "loss": 1.0602, "step": 580},
    {"epoch": 0.3521974714027694, "grad_norm": 0.9496961252830911, "learning_rate": 8.189711611193012e-06, "loss": 1.0517, "step": 585},
    {"epoch": 0.35520770620108366, "grad_norm": 0.9723471751859206, "learning_rate": 8.149052610141357e-06, "loss": 1.044, "step": 590},
    {"epoch": 0.35821794099939797, "grad_norm": 0.9399615087193514, "learning_rate": 8.108045500712518e-06, "loss": 1.0588, "step": 595},
    {"epoch": 0.3612281757977122, "grad_norm": 0.9845428819310491, "learning_rate": 8.066694815989961e-06, "loss": 1.061, "step": 600},
    {"epoch": 0.36423841059602646, "grad_norm": 0.9891275737820486, "learning_rate": 8.025005127037282e-06, "loss": 1.0615, "step": 605},
    {"epoch": 0.36724864539434077, "grad_norm": 0.9954042733317254, "learning_rate": 7.982981042392907e-06, "loss": 1.0497, "step": 610},
    {"epoch": 0.370258880192655, "grad_norm": 1.000537961876559, "learning_rate": 7.940627207560655e-06, "loss": 1.0441, "step": 615},
    {"epoch": 0.3732691149909693, "grad_norm": 0.9496111907275188, "learning_rate": 7.897948304496189e-06, "loss": 1.0648, "step": 620},
    {"epoch": 0.37627934978928357, "grad_norm": 0.9393968941122551, "learning_rate": 7.854949051089467e-06, "loss": 1.0676, "step": 625},
    {"epoch": 0.3792895845875978, "grad_norm": 0.9380308472022797, "learning_rate": 7.811634200643202e-06, "loss": 1.0739, "step": 630},
    {"epoch": 0.3822998193859121, "grad_norm": 0.9486486114086516, "learning_rate": 7.768008541347423e-06, "loss": 1.0221, "step": 635},
    {"epoch": 0.38531005418422637, "grad_norm": 68.18970323635645, "learning_rate": 7.72407689575016e-06, "loss": 1.0515, "step": 640},
    {"epoch": 0.3883202889825406, "grad_norm": 0.9508633036813551, "learning_rate": 7.67984412022434e-06, "loss": 1.0467, "step": 645},
    {"epoch": 0.3913305237808549, "grad_norm": 0.9916855461468728, "learning_rate": 7.635315104430959e-06, "loss": 1.0367, "step": 650},
    {"epoch": 0.39434075857916917, "grad_norm": 0.9446909197494837, "learning_rate": 7.5904947707785434e-06, "loss": 1.0305, "step": 655},
    {"epoch": 0.3973509933774834, "grad_norm": 0.9210322564666947, "learning_rate": 7.545388073879018e-06, "loss": 1.0591, "step": 660},
    {"epoch": 0.4003612281757977, "grad_norm": 0.9306461151238373, "learning_rate": 7.500000000000001e-06, "loss": 1.0413, "step": 665},
    {"epoch": 0.40337146297411197, "grad_norm": 0.9658878999866418, "learning_rate": 7.454335566513603e-06, "loss": 1.0465, "step": 670},
    {"epoch": 0.4063816977724263, "grad_norm": 0.9583821484101821, "learning_rate": 7.408399821341787e-06, "loss": 1.0544, "step": 675},
    {"epoch": 0.4093919325707405, "grad_norm": 0.9127670551716062, "learning_rate": 7.362197842398355e-06, "loss": 1.0346, "step": 680},
    {"epoch": 0.4124021673690548, "grad_norm": 0.9278308537424353, "learning_rate": 7.315734737027612e-06, "loss": 1.0453, "step": 685},
    {"epoch": 0.4154124021673691, "grad_norm": 0.9723444779061409, "learning_rate": 7.2690156414397775e-06, "loss": 1.0258, "step": 690},
    {"epoch": 0.4184226369656833, "grad_norm": 0.9347486140545276, "learning_rate": 7.22204572014322e-06, "loss": 1.0338, "step": 695},
    {"epoch": 0.4214328717639976, "grad_norm": 3.85387878818612, "learning_rate": 7.174830165373542e-06, "loss": 1.0431, "step": 700},
    {"epoch": 0.4244431065623119, "grad_norm": 0.9143452453786128, "learning_rate": 7.127374196519616e-06, "loss": 1.0336, "step": 705},
    {"epoch": 0.4274533413606261, "grad_norm": 0.9482641660012492, "learning_rate": 7.079683059546607e-06, "loss": 1.0454, "step": 710},
    {"epoch": 0.4304635761589404, "grad_norm": 1.04293445699686, "learning_rate": 7.031762026416074e-06, "loss": 1.0261, "step": 715},
    {"epoch": 0.4334738109572547, "grad_norm": 0.9875257232657468, "learning_rate": 6.983616394503177e-06, "loss": 1.0448, "step": 720},
    {"epoch": 0.43648404575556893, "grad_norm": 2.4930481995359015, "learning_rate": 6.9352514860110876e-06, "loss": 1.0688, "step": 725},
    {"epoch": 0.4394942805538832, "grad_norm": 0.9536222920261503, "learning_rate": 6.886672647382653e-06, "loss": 1.0338, "step": 730},
    {"epoch": 0.4425045153521975, "grad_norm": 1.079837034688785, "learning_rate": 6.837885248709386e-06, "loss": 1.0675, "step": 735},
    {"epoch": 0.44551475015051173, "grad_norm": 0.9267764883427583, "learning_rate": 6.788894683137822e-06, "loss": 1.0309, "step": 740},
    {"epoch": 0.44852498494882603, "grad_norm": 1.0722868967903327, "learning_rate": 6.739706366273346e-06, "loss": 1.0475, "step": 745},
    {"epoch": 0.4515352197471403, "grad_norm": 1.1196709849520698, "learning_rate": 6.690325735581532e-06, "loss": 1.0303, "step": 750},
    {"epoch": 0.45454545454545453, "grad_norm": 0.9174528976707828, "learning_rate": 6.640758249787067e-06, "loss": 1.0494, "step": 755},
    {"epoch": 0.45755568934376883, "grad_norm": 0.9480777677094587, "learning_rate": 6.591009388270315e-06, "loss": 1.0421, "step": 760},
    {"epoch": 0.4605659241420831, "grad_norm": 2.212409600409066, "learning_rate": 6.54108465046161e-06, "loss": 1.0426, "step": 765},
    {"epoch": 0.46357615894039733, "grad_norm": 1.0682566847564687, "learning_rate": 6.490989555233328e-06, "loss": 1.0404, "step": 770},
    {"epoch": 0.46658639373871164, "grad_norm": 10.206202788584402, "learning_rate": 6.440729640289809e-06, "loss": 1.0292, "step": 775},
    {"epoch": 0.4695966285370259, "grad_norm": 1.1020753443992015, "learning_rate": 6.3903104615551956e-06, "loss": 1.0232, "step": 780},
    {"epoch": 0.47260686333534013, "grad_norm": 5.167899676012411, "learning_rate": 6.3397375925592675e-06, "loss": 1.006, "step": 785},
    {"epoch": 0.47561709813365444, "grad_norm": 1.192166577504938, "learning_rate": 6.289016623821308e-06, "loss": 1.0419, "step": 790},
    {"epoch": 0.4786273329319687, "grad_norm": 1.367111758271087, "learning_rate": 6.2381531622321234e-06, "loss": 1.0426, "step": 795},
    {"epoch": 0.481637567730283, "grad_norm": 0.9109840034743182, "learning_rate": 6.18715283043422e-06, "loss": 1.0314, "step": 800},
    {"epoch": 0.48464780252859724, "grad_norm": 0.8741141489406814, "learning_rate": 6.136021266200271e-06, "loss": 1.0328, "step": 805},
    {"epoch": 0.4876580373269115, "grad_norm": 0.9172189163013432, "learning_rate": 6.084764121809878e-06, "loss": 1.0325, "step": 810},
    {"epoch": 0.4906682721252258, "grad_norm": 0.9369840939625793, "learning_rate": 6.033387063424765e-06, "loss": 1.0228, "step": 815},
    {"epoch": 0.49367850692354004, "grad_norm": 1.432152664217959, "learning_rate": 5.9818957704624046e-06, "loss": 1.0477, "step": 820},
    {"epoch": 0.4966887417218543, "grad_norm": 0.8878345436889384, "learning_rate": 5.930295934968197e-06, "loss": 1.0238, "step": 825},
    {"epoch": 0.4996989765201686, "grad_norm": 0.8962519472927415, "learning_rate": 5.878593260986256e-06, "loss": 1.0194, "step": 830},
    {"epoch": 0.5027092113184829, "grad_norm": 1.499274394693842, "learning_rate": 5.8267934639288525e-06, "loss": 1.0432, "step": 835},
    {"epoch": 0.5057194461167971, "grad_norm": 0.9364157701378347, "learning_rate": 5.77490226994462e-06, "loss": 1.0456, "step": 840},
    {"epoch": 0.5087296809151114, "grad_norm": 0.9248923318440813, "learning_rate": 5.722925415285555e-06, "loss": 1.0528, "step": 845},
    {"epoch": 0.5117399157134257, "grad_norm": 0.9622767369752785, "learning_rate": 5.670868645672916e-06, "loss": 1.0433, "step": 850},
    {"epoch": 0.5147501505117399, "grad_norm": 1.170025668084364, "learning_rate": 5.618737715662067e-06, "loss": 1.03, "step": 855},
    {"epoch": 0.5177603853100542, "grad_norm": 0.9043466850630412, "learning_rate": 5.566538388006351e-06, "loss": 1.039, "step": 860},
    {"epoch": 0.5207706201083685, "grad_norm": 0.8849608499162146, "learning_rate": 5.514276433020044e-06, "loss": 1.0249, "step": 865},
    {"epoch": 0.5237808549066827, "grad_norm": 0.9464553636560172, "learning_rate": 5.461957627940489e-06, "loss": 1.0198, "step": 870},
    {"epoch": 0.526791089704997, "grad_norm": 0.9179569227254959, "learning_rate": 5.409587756289462e-06, "loss": 1.0205, "step": 875},
    {"epoch": 0.5298013245033113, "grad_norm": 1.9045126443295857, "learning_rate": 5.357172607233831e-06, "loss": 1.0182, "step": 880},
    {"epoch": 0.5328115593016255, "grad_norm": 1.4051118790220352, "learning_rate": 5.304717974945596e-06, "loss": 1.0221, "step": 885},
    {"epoch": 0.5358217940999398, "grad_norm": 0.9078454332904633, "learning_rate": 5.252229657961394e-06, "loss": 1.0206, "step": 890},
    {"epoch": 0.5388320288982541, "grad_norm": 0.8875050113196231, "learning_rate": 5.199713458541495e-06, "loss": 1.0352, "step": 895},
    {"epoch": 0.5418422636965683, "grad_norm": 0.9480042480699051, "learning_rate": 5.1471751820284e-06, "loss": 1.0134, "step": 900},
    {"epoch": 0.5448524984948826, "grad_norm": 0.902719752286893, "learning_rate": 5.094620636205096e-06, "loss": 1.01, "step": 905},
    {"epoch": 0.5478627332931969, "grad_norm": 0.8905457235226512, "learning_rate": 5.042055630653042e-06, "loss": 1.0206, "step": 910},
    {"epoch": 0.5508729680915111, "grad_norm": 0.9206243693271725, "learning_rate": 4.98948597610996e-06, "loss": 1.0121, "step": 915},
    {"epoch": 0.5538832028898254, "grad_norm": 0.882467337650688, "learning_rate": 4.936917483827483e-06, "loss": 0.9989, "step": 920},
    {"epoch": 0.5568934376881397, "grad_norm": 0.8674487532731974, "learning_rate": 4.884355964928767e-06, "loss": 1.0463, "step": 925},
    {"epoch": 0.5599036724864539, "grad_norm": 0.8791260040749262, "learning_rate": 4.831807229766101e-06, "loss": 1.0047, "step": 930},
    {"epoch": 0.5629139072847682, "grad_norm": 0.9167695803034189, "learning_rate": 4.779277087278615e-06, "loss": 1.03, "step": 935},
    {"epoch": 0.5659241420830825, "grad_norm": 0.8934690190804779, "learning_rate": 4.7267713443501274e-06, "loss": 1.0381, "step": 940},
    {"epoch": 0.5689343768813967, "grad_norm": 1.1473340162402226, "learning_rate": 4.67429580516724e-06, "loss": 1.0146, "step": 945},
    {"epoch": 0.571944611679711, "grad_norm": 0.8911747505115302, "learning_rate": 4.6218562705777185e-06, "loss": 1.0142, "step": 950},
    {"epoch": 0.5749548464780253, "grad_norm": 0.9543234233344876, "learning_rate": 4.5694585374492314e-06, "loss": 1.0243, "step": 955},
    {"epoch": 0.5779650812763396, "grad_norm": 0.9223284966528281, "learning_rate": 4.517108398028566e-06, "loss": 1.0417, "step": 960},
    {"epoch": 0.5809753160746538, "grad_norm": 0.8812509821809876, "learning_rate": 4.464811639301314e-06, "loss": 1.0195, "step": 965},
    {"epoch": 0.5839855508729681, "grad_norm": 0.9091706304207465, "learning_rate": 4.412574042352156e-06, "loss": 1.0337, "step": 970},
    {"epoch": 0.5869957856712824, "grad_norm": 0.8926473999654263, "learning_rate": 4.360401381725806e-06, "loss": 1.0221, "step": 975},
    {"epoch": 0.5900060204695966, "grad_norm": 1.6530772893621817, "learning_rate": 4.308299424788667e-06, "loss": 1.0382, "step": 980},
    {"epoch": 0.5930162552679109, "grad_norm": 0.9083816640002393, "learning_rate": 4.256273931091284e-06, "loss": 1.0239, "step": 985},
    {"epoch": 0.5960264900662252, "grad_norm": 0.9029678699407463, "learning_rate": 4.204330651731662e-06, "loss": 1.0191, "step": 990},
    {"epoch": 0.5990367248645394, "grad_norm": 0.8880707813892602, "learning_rate": 4.152475328719517e-06, "loss": 1.0205, "step": 995},
    {"epoch": 0.6020469596628537, "grad_norm": 0.9115699732773067, "learning_rate": 4.1007136943415325e-06, "loss": 1.0072, "step": 1000},
    {"epoch": 0.605057194461168, "grad_norm": 0.8460435318858166, "learning_rate": 4.049051470527692e-06, "loss": 1.0262, "step": 1005},
    {"epoch": 0.6080674292594822, "grad_norm": 0.992395161503029, "learning_rate": 3.997494368218745e-06, "loss": 1.0165, "step": 1010},
    {"epoch": 0.6110776640577965, "grad_norm": 1.859721509427287, "learning_rate": 3.946048086734921e-06, "loss": 1.0347, "step": 1015},
    {"epoch": 0.6140878988561108, "grad_norm": 0.8921204238192161, "learning_rate": 3.894718313145873e-06, "loss": 1.0256, "step": 1020},
    {"epoch": 0.617098133654425, "grad_norm": 1.2681625671865207, "learning_rate": 3.843510721642036e-06, "loss": 1.0081, "step": 1025},
    {"epoch": 0.6201083684527393, "grad_norm": 0.8915300481916899, "learning_rate": 3.7924309729073616e-06, "loss": 1.0155, "step": 1030},
    {"epoch": 0.6231186032510536, "grad_norm": 0.9396438819521465, "learning_rate": 3.7414847134935716e-06, "loss": 1.0046, "step": 1035},
    {"epoch": 0.6261288380493678, "grad_norm": 3.922062228338965, "learning_rate": 3.6906775751959667e-06, "loss": 1.022, "step": 1040},
    {"epoch": 0.6291390728476821, "grad_norm": 0.9172947691983047, "learning_rate": 3.640015174430864e-06, "loss": 0.9998, "step": 1045},
    {"epoch": 0.6321493076459964, "grad_norm": 0.889016554505559, "learning_rate": 3.5895031116147355e-06, "loss": 1.0143, "step": 1050},
    {"epoch": 0.6351595424443106, "grad_norm": 1.0135918728882847, "learning_rate": 3.539146970545124e-06, "loss": 1.02, "step": 1055},
    {"epoch": 0.6381697772426249, "grad_norm": 0.8701415123045054, "learning_rate": 3.488952317783374e-06, "loss": 1.0127, "step": 1060},
    {"epoch": 0.6411800120409392, "grad_norm": 0.8880221243740891, "learning_rate": 3.438924702039301e-06, "loss": 1.0089, "step": 1065},
    {"epoch": 0.6441902468392534, "grad_norm": 0.8735477996007547, "learning_rate": 3.389069653557805e-06, "loss": 1.0216, "step": 1070},
    {"epoch": 0.6472004816375677, "grad_norm": 0.9072044456513535, "learning_rate": 3.3393926835075307e-06, "loss": 1.0115, "step": 1075},
    {"epoch": 0.650210716435882, "grad_norm": 0.8883607705827921, "learning_rate": 3.289899283371657e-06, "loss": 1.0014, "step": 1080},
    {"epoch": 0.6532209512341962, "grad_norm": 0.9518629250264209, "learning_rate": 3.240594924340835e-06, "loss": 1.0094, "step": 1085},
    {"epoch": 0.6562311860325105, "grad_norm": 0.877493524698432, "learning_rate": 3.1914850567083866e-06, "loss": 1.019, "step": 1090},
    {"epoch": 0.6592414208308248, "grad_norm": 0.9145761162077933, "learning_rate": 3.1425751092678064e-06, "loss": 1.005, "step": 1095},
    {"epoch": 0.6622516556291391, "grad_norm": 1.475049622537514, "learning_rate": 3.0938704887126425e-06, "loss": 0.9926, "step": 1100},
    {"epoch": 0.6652618904274533, "grad_norm": 0.8572700406837115, "learning_rate": 3.045376579038821e-06, "loss": 1.0021, "step": 1105},
    {"epoch": 0.6682721252257676, "grad_norm": 0.852655010706633, "learning_rate": 2.9970987409494784e-06, "loss": 1.0387, "step": 1110},
    {"epoch": 0.6712823600240819, "grad_norm": 0.9146351266641374, "learning_rate": 2.9490423112623646e-06, "loss": 1.0042, "step": 1115},
    {"epoch": 0.6742925948223961, "grad_norm": 0.9064467261792086, "learning_rate": 2.9012126023198973e-06, "loss": 1.017, "step": 1120},
    {"epoch": 0.6773028296207104, "grad_norm": 1.3125167951045396, "learning_rate": 2.853614901401909e-06, "loss": 0.9869, "step": 1125},
    {"epoch": 0.6803130644190247, "grad_norm": 0.8839117074053637, "learning_rate": 2.806254470141174e-06, "loss": 1.0214, "step": 1130},
    {"epoch": 0.6833232992173389, "grad_norm": 0.8715135185909122, "learning_rate": 2.759136543941773e-06, "loss": 0.9828, "step": 1135},
    {"epoch": 0.6863335340156532, "grad_norm": 0.8908461313314309, "learning_rate": 2.712266331400332e-06, "loss": 1.0032, "step": 1140},
    {"epoch": 0.6893437688139675, "grad_norm": 0.8884047218150805, "learning_rate": 2.66564901373027e-06, "loss": 1.0124, "step": 1145},
    {"epoch": 0.6923540036122817, "grad_norm": 0.9119096888988255, "learning_rate": 2.6192897441890337e-06, "loss": 1.003, "step": 1150},
    {"epoch": 0.695364238410596, "grad_norm": 4.230701533700929, "learning_rate": 2.573193647508426e-06, "loss": 1.0037, "step": 1155},
    {"epoch": 0.6983744732089103, "grad_norm": 0.8664627590837305, "learning_rate": 2.5273658193281252e-06, "loss": 0.9871, "step": 1160},
    {"epoch": 0.7013847080072245, "grad_norm": 1.3686260181680083, "learning_rate": 2.4818113256323745e-06, "loss": 1.0031, "step": 1165},
    {"epoch": 0.7043949428055388, "grad_norm": 0.8810846166712105, "learning_rate": 2.4365352021899635e-06, "loss": 1.0075, "step": 1170},
    {"epoch": 0.7074051776038531, "grad_norm": 0.9013744748771672, "learning_rate": 2.391542453997578e-06, "loss": 0.9983, "step": 1175},
    {"epoch": 0.7104154124021673, "grad_norm": 0.8824175367501911, "learning_rate": 2.346838054726505e-06, "loss": 1.0049, "step": 1180},
    {"epoch": 0.7134256472004816, "grad_norm": 0.8647677789614462, "learning_rate": 2.302426946172836e-06, "loss": 1.0199, "step": 1185},
    {"epoch": 0.7164358819987959, "grad_norm": 0.8974669950167197, "learning_rate": 2.258314037711184e-06, "loss": 1.0078, "step": 1190},
    {"epoch": 0.7194461167971101, "grad_norm": 0.8942689905651798, "learning_rate": 2.214504205751971e-06, "loss": 1.0248, "step": 1195},
    {"epoch": 0.7224563515954244, "grad_norm": 0.8613593925636255, "learning_rate": 2.1710022932023805e-06, "loss": 1.0041, "step": 1200},
    {"epoch": 0.7254665863937387, "grad_norm": 0.9273768242975111, "learning_rate": 2.127813108931007e-06, "loss": 1.0093, "step": 1205},
    {"epoch": 0.7284768211920529, "grad_norm": 0.8689116817146251, "learning_rate": 2.084941427236245e-06, "loss": 1.0165, "step": 1210},
    {"epoch": 0.7314870559903672, "grad_norm": 0.925626300168374, "learning_rate": 2.04239198731855e-06, "loss": 1.0004, "step": 1215},
    {"epoch": 0.7344972907886815, "grad_norm": 1.0155037341491024, "learning_rate": 2.000169492756523e-06, "loss": 1.0147, "step": 1220},
    {"epoch": 0.7375075255869958, "grad_norm": 0.9027560092938075, "learning_rate": 1.9582786109869713e-06, "loss": 1.0131, "step": 1225},
    {"epoch": 0.74051776038531, "grad_norm": 0.8889230240612882, "learning_rate": 1.9167239727889527e-06, "loss": 1.0106, "step": 1230},
    {"epoch": 0.7435279951836243, "grad_norm": 0.8859832869083346, "learning_rate": 1.875510171771865e-06, "loss": 0.9935, "step": 1235},
    {"epoch": 0.7465382299819386, "grad_norm": 0.8935698851896953, "learning_rate": 1.8346417638676533e-06, "loss": 0.9845, "step": 1240},
    {"epoch": 0.7495484647802528, "grad_norm": 0.9329442227378806, "learning_rate": 1.7941232668271863e-06, "loss": 1.0111, "step": 1245},
    {"epoch": 0.7525586995785671, "grad_norm": 0.8933360465539598, "learning_rate": 1.753959159720836e-06, "loss": 1.0013, "step": 1250},
    {"epoch": 0.7555689343768814, "grad_norm": 0.8850416820407446, "learning_rate": 1.7141538824433506e-06, "loss": 1.0092, "step": 1255},
    {"epoch": 0.7585791691751956, "grad_norm": 0.8911148955200473, "learning_rate": 1.6747118352230495e-06, "loss": 1.0035, "step": 1260},
    {"epoch": 0.7615894039735099, "grad_norm": 0.8485797781055803, "learning_rate": 1.6356373781354058e-06, "loss": 0.9819, "step": 1265},
    {"epoch": 0.7645996387718242, "grad_norm": 0.8845205626085196, "learning_rate": 1.5969348306210692e-06, "loss": 1.0027, "step": 1270},
    {"epoch": 0.7676098735701384, "grad_norm": 0.8858624981584671, "learning_rate": 1.5586084710083737e-06, "loss": 1.0093, "step": 1275},
    {"epoch": 0.7706201083684527, "grad_norm": 0.8814363753611782, "learning_rate": 1.5206625360403943e-06, "loss": 0.9979, "step": 1280},
    {"epoch": 0.773630343166767, "grad_norm": 0.8491013076963814, "learning_rate": 1.4831012204066114e-06, "loss": 1.0148, "step": 1285},
    {"epoch": 0.7766405779650812, "grad_norm": 0.8677146375124508, "learning_rate": 1.445928676279199e-06, "loss": 0.9916, "step": 1290},
    {"epoch": 0.7796508127633955, "grad_norm": 0.8646340907999152, "learning_rate": 1.4091490128540374e-06, "loss": 0.9836, "step": 1295},
    {"epoch": 0.7826610475617098, "grad_norm": 0.8451931482695525, "learning_rate": 1.3727662958964627e-06, "loss": 0.9995, "step": 1300},
    {"epoch": 0.785671282360024, "grad_norm": 0.8554438998915223, "learning_rate": 1.3367845472918272e-06, "loss": 1.0065, "step": 1305},
    {"epoch": 0.7886815171583383, "grad_norm": 0.8789308792137018, "learning_rate": 1.3012077446008969e-06, "loss": 1.0012, "step": 1310},
    {"epoch": 0.7916917519566526, "grad_norm": 0.8781828397403413, "learning_rate": 1.266039820620159e-06, "loss": 1.0042, "step": 1315},
    {"epoch": 0.7947019867549668, "grad_norm": 0.8625507845317792, "learning_rate": 1.2312846629470826e-06, "loss": 0.9886, "step": 1320},
    {"epoch": 0.7977122215532811, "grad_norm": 0.8959832830725274, "learning_rate": 1.1969461135503573e-06, "loss": 1.0059, "step": 1325},
    {"epoch": 0.8007224563515954, "grad_norm": 0.8678075235569929, "learning_rate": 1.163027968345195e-06, "loss": 1.0094, "step": 1330},
    {"epoch": 0.8037326911499096, "grad_norm": 1.0055210740771279, "learning_rate": 1.1295339767737125e-06, "loss": 0.9887, "step": 1335},
    {"epoch": 0.8067429259482239, "grad_norm": 1.6253290809007488, "learning_rate": 1.0964678413904529e-06, "loss": 1.0028, "step": 1340},
    {"epoch": 0.8097531607465382, "grad_norm": 0.9127445612020656, "learning_rate": 1.0638332174530953e-06, "loss": 1.0114, "step": 1345},
    {"epoch": 0.8127633955448526, "grad_norm": 0.8928296605602871, "learning_rate": 1.0316337125183817e-06, "loss": 0.9991, "step": 1350},
    {"epoch": 0.8157736303431667, "grad_norm": 1.1302003367983464, "learning_rate": 9.998728860433277e-07, "loss": 1.0002, "step": 1355},
    {"epoch": 0.818783865141481, "grad_norm": 0.8807431712720114, "learning_rate": 9.685542489917494e-07, "loss": 0.9819, "step": 1360},
    {"epoch": 0.8217940999397954, "grad_norm": 0.8894964118409194, "learning_rate": 9.376812634461418e-07, "loss": 0.9923, "step": 1365},
    {"epoch": 0.8248043347381095, "grad_norm": 0.8950990656247966, "learning_rate": 9.072573422249692e-07, "loss": 1.0165, "step": 1370},
    {"epoch": 0.8278145695364238, "grad_norm": 0.8600942880069391, "learning_rate": 8.772858485054042e-07, "loss": 1.0055, "step": 1375},
    {"epoch": 0.8308248043347382, "grad_norm": 1.0176570771410658, "learning_rate": 8.477700954515372e-07, "loss": 1.0034, "step": 1380},
    {"epoch": 0.8338350391330523, "grad_norm": 1.1332975568419288, "learning_rate": 8.187133458481416e-07, "loss": 1.0037, "step": 1385},
    {"epoch": 0.8368452739313667, "grad_norm": 0.8771143983027159, "learning_rate": 7.901188117399817e-07, "loss": 1.0134, "step": 1390},
    {"epoch": 0.839855508729681, "grad_norm": 0.8769447007569386, "learning_rate": 7.619896540767435e-07, "loss": 1.0137, "step": 1395},
    {"epoch": 0.8428657435279951, "grad_norm": 0.8715300102195939, "learning_rate": 7.343289823636168e-07, "loss": 1.0119, "step": 1400},
    {"epoch": 0.8458759783263095, "grad_norm": 3.8854548450704884, "learning_rate": 7.0713985431755e-07, "loss": 1.0101, "step": 1405},
    {"epoch": 0.8488862131246238, "grad_norm": 0.8550203433574886, "learning_rate": 6.804252755292429e-07, "loss": 0.9804, "step": 1410},
    {"epoch": 0.851896447922938, "grad_norm": 0.8574614831593009, "learning_rate": 6.541881991309013e-07, "loss": 0.9992, "step": 1415},
    {"epoch": 0.8549066827212523, "grad_norm": 0.8801899228066352, "learning_rate": 6.284315254697726e-07, "loss": 1.0055, "step": 1420},
    {"epoch": 0.8579169175195666, "grad_norm": 0.8646087529710637, "learning_rate": 6.031581017875482e-07, "loss": 1.0097, "step": 1425},
    {"epoch": 0.8609271523178808, "grad_norm": 0.8697784834913861, "learning_rate": 5.783707219056078e-07, "loss": 1.0004, "step": 1430},
    {"epoch": 0.863937387116195, "grad_norm": 0.8738950667485581, "learning_rate": 5.540721259161774e-07, "loss": 1.0078, "step": 1435},
    {"epoch": 0.8669476219145094, "grad_norm": 0.8961144717239811, "learning_rate": 5.302649998794368e-07, "loss": 1.0113, "step": 1440},
    {"epoch": 0.8699578567128236, "grad_norm": 0.870935027097703, "learning_rate": 5.0695197552659e-07, "loss": 0.9904, "step": 1445},
    {"epoch": 0.8729680915111379, "grad_norm": 0.8567929575500429, "learning_rate": 4.841356299689359e-07, "loss": 1.0028, "step": 1450},
    {"epoch": 0.8759783263094522, "grad_norm": 0.9123895694513533, "learning_rate": 4.618184854129981e-07, "loss": 1.0126, "step": 1455},
    {"epoch": 0.8789885611077664, "grad_norm": 1.0746686361139868, "learning_rate": 4.4000300888169753e-07, "loss": 1.0017, "step": 1460},
    {"epoch": 0.8819987959060807, "grad_norm": 0.8759725943448924, "learning_rate": 4.1869161194164565e-07, "loss": 1.0031, "step": 1465},
    {"epoch": 0.885009030704395, "grad_norm": 1.4852691417263835, "learning_rate": 3.9788665043656083e-07, "loss": 0.9965, "step": 1470},
    {"epoch": 0.8880192655027093, "grad_norm": 0.8929495276279715, "learning_rate": 3.775904242268391e-07, "loss": 1.0236, "step": 1475},
    {"epoch": 0.8910295003010235, "grad_norm": 0.8758884812335123, "learning_rate": 3.578051769353219e-07, "loss": 1.0118, "step": 1480},
    {"epoch": 0.8940397350993378, "grad_norm": 1.9572396797440648, "learning_rate": 3.385330956992816e-07, "loss": 0.9987, "step": 1485},
    {"epoch": 0.8970499698976521, "grad_norm": 0.8813248265767825, "learning_rate": 3.1977631092863613e-07, "loss": 1.004, "step": 1490},
    {"epoch": 0.9000602046959663, "grad_norm": 0.8785988861657655, "learning_rate": 3.015368960704584e-07, "loss": 0.9875, "step": 1495},
    {"epoch": 0.9030704394942806, "grad_norm": 0.8712193660484964, "learning_rate": 2.8381686737975867e-07, "loss": 1.0108, "step": 1500},
    {"epoch": 0.9060806742925949, "grad_norm": 0.8779863820284991, "learning_rate": 2.666181836966053e-07, "loss": 1.0141, "step": 1505},
    {"epoch": 0.9090909090909091, "grad_norm": 0.8640378775819231, "learning_rate": 2.4994274622958726e-07, "loss": 1.0136, "step": 1510},
    {"epoch": 0.9121011438892234, "grad_norm": 0.8607419097086582, "learning_rate": 2.3379239834564526e-07, "loss": 1.0044, "step": 1515},
    {"epoch": 0.9151113786875377, "grad_norm": 0.8927580080579623, "learning_rate": 2.1816892536629775e-07, "loss": 1.0114, "step": 1520},
    {"epoch": 0.9181216134858519, "grad_norm": 0.8535555555413261, "learning_rate": 2.0307405437029027e-07, "loss": 0.9961, "step": 1525},
    {"epoch": 0.9211318482841662, "grad_norm": 0.9049922865987879, "learning_rate": 1.8850945400266994e-07, "loss": 1.0163, "step": 1530},
    {"epoch": 0.9241420830824805, "grad_norm": 0.8499334715851968, "learning_rate": 1.7447673429033361e-07, "loss": 0.9946, "step": 1535},
    {"epoch": 0.9271523178807947, "grad_norm": 0.8909209774091112, "learning_rate": 1.6097744646404457e-07, "loss": 0.9961, "step": 1540},
    {"epoch": 0.930162552679109, "grad_norm": 0.8560970950116495, "learning_rate": 1.4801308278695636e-07, "loss": 1.0019, "step": 1545},
    {"epoch": 0.9331727874774233, "grad_norm": 0.883934931226354, "learning_rate": 1.3558507638965158e-07, "loss": 1.0061, "step": 1550},
    {"epoch": 0.9361830222757375, "grad_norm": 0.8961538667036166, "learning_rate": 1.2369480111171784e-07, "loss": 1.0054, "step": 1555},
    {"epoch": 0.9391932570740518, "grad_norm": 0.9001828543117812, "learning_rate": 1.1234357134987717e-07, "loss": 0.9876, "step": 1560},
    {"epoch": 0.9422034918723661, "grad_norm": 0.9606731439347472, "learning_rate": 1.0153264191269052e-07, "loss": 1.0106, "step": 1565},
    {"epoch": 0.9452137266706803, "grad_norm": 0.9005969806614522, "learning_rate": 9.126320788184374e-08, "loss": 0.9934, "step": 1570},
    {"epoch": 0.9482239614689946, "grad_norm": 0.8701900370996407, "learning_rate": 8.153640448003875e-08, "loss": 0.9845, "step": 1575},
    {"epoch": 0.9512341962673089, "grad_norm": 0.8767879074096734, "learning_rate": 7.235330694550402e-08, "loss": 1.0158, "step": 1580},
    {"epoch": 0.9542444310656231, "grad_norm": 0.8630669248508293, "learning_rate": 6.371493041313126e-08, "loss": 0.9907, "step": 1585},
    {"epoch": 0.9572546658639374, "grad_norm": 3.3701456799509244, "learning_rate": 5.562222980225907e-08, "loss": 0.9962, "step": 1590},
    {"epoch": 0.9602649006622517, "grad_norm": 0.8469748656240412, "learning_rate": 4.807609971111238e-08, "loss": 0.9891, "step": 1595},
    {"epoch": 0.963275135460566, "grad_norm": 0.9099505574681345, "learning_rate": 4.107737431791159e-08, "loss": 1.0185, "step": 1600},
    {"epoch": 0.9662853702588802, "grad_norm": 0.8973725037838026, "learning_rate": 3.462682728865685e-08, "loss": 0.9981, "step": 1605},
    {"epoch": 0.9692956050571945, "grad_norm": 0.9061860450514891, "learning_rate": 2.8725171691605934e-08, "loss": 1.0174, "step": 1610},
    {"epoch": 0.9723058398555088, "grad_norm": 0.981943649150158, "learning_rate": 2.3373059918448958e-08, "loss": 1.0, "step": 1615},
    {"epoch": 0.975316074653823, "grad_norm": 0.8890699624303203, "learning_rate": 1.8571083612188845e-08, "loss": 1.0094, "step": 1620},
    {"epoch": 0.9783263094521373, "grad_norm": 0.8705689131489066, "learning_rate": 1.431977360173975e-08, "loss": 0.9934, "step": 1625},
    {"epoch": 0.9813365442504516, "grad_norm": 0.910606622312982, "learning_rate": 1.0619599843249006e-08, "loss": 1.001, "step": 1630},
    {"epoch": 0.9843467790487658, "grad_norm": 0.8711153771031834, "learning_rate": 7.470971368142011e-09, "loss": 1.0114, "step": 1635},
    {"epoch": 0.9873570138470801, "grad_norm": 0.8635302129219209, "learning_rate": 4.874236237911723e-09, "loss": 1.0028, "step": 1640},
    {"epoch": 0.9903672486453944, "grad_norm": 0.8822503527540938, "learning_rate": 2.8296815056377824e-09, "loss": 1.0149, "step": 1645},
    {"epoch": 0.9933774834437086, "grad_norm": 0.8786588529013926, "learning_rate": 1.3375331842574446e-09, "loss": 1.0336, "step": 1650},
    {"epoch": 0.9963877182420229, "grad_norm": 3.6164785189545476, "learning_rate": 3.9795622158111945e-10, "loss": 1.0063, "step": 1655},
    {"epoch": 0.9993979530403372, "grad_norm": 0.8926911293866052, "learning_rate": 1.1054482056405136e-11, "loss": 1.002, "step": 1660},
    {"epoch": 1.0, "eval_runtime": 0.728, "eval_samples_per_second": 13.736, "eval_steps_per_second": 4.121, "step": 1661},
    {"epoch": 1.0, "step": 1661, "total_flos": 80925639573504.0, "train_loss": 1.0968368504149306, "train_runtime": 1531.4087, "train_samples_per_second": 17.352, "train_steps_per_second": 1.085}
  ],
  "logging_steps": 5,
  "max_steps": 1661,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 80925639573504.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}