{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 200,
  "global_step": 1124,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0017793594306049821,
      "grad_norm": 6.087945809168618,
      "learning_rate": 9.99998046979289e-06,
      "loss": 0.2464,
      "step": 1
    },
    {
      "epoch": 0.0035587188612099642,
      "grad_norm": 5.380806015838377,
      "learning_rate": 9.999921879324127e-06,
      "loss": 0.2872,
      "step": 2
    },
    {
      "epoch": 0.005338078291814947,
      "grad_norm": 5.343443282913692,
      "learning_rate": 9.999824229051425e-06,
      "loss": 0.2594,
      "step": 3
    },
    {
      "epoch": 0.0071174377224199285,
      "grad_norm": 5.236182827231183,
      "learning_rate": 9.999687519737639e-06,
      "loss": 0.2128,
      "step": 4
    },
    {
      "epoch": 0.008896797153024912,
      "grad_norm": 5.176814353576444,
      "learning_rate": 9.99951175245075e-06,
      "loss": 0.188,
      "step": 5
    },
    {
      "epoch": 0.010676156583629894,
      "grad_norm": 4.669600468249248,
      "learning_rate": 9.999296928563868e-06,
      "loss": 0.2471,
      "step": 6
    },
    {
      "epoch": 0.012455516014234875,
      "grad_norm": 5.65583256312908,
      "learning_rate": 9.999043049755216e-06,
      "loss": 0.2529,
      "step": 7
    },
    {
      "epoch": 0.014234875444839857,
      "grad_norm": 4.153885053321903,
      "learning_rate": 9.998750118008117e-06,
      "loss": 0.146,
      "step": 8
    },
    {
      "epoch": 0.01601423487544484,
      "grad_norm": 5.038723683327088,
      "learning_rate": 9.998418135610974e-06,
      "loss": 0.1811,
      "step": 9
    },
    {
      "epoch": 0.017793594306049824,
      "grad_norm": 6.704105948961755,
      "learning_rate": 9.998047105157265e-06,
      "loss": 0.3031,
      "step": 10
    },
    {
      "epoch": 0.019572953736654804,
      "grad_norm": 5.95456127245007,
      "learning_rate": 9.997637029545509e-06,
      "loss": 0.1632,
      "step": 11
    },
    {
      "epoch": 0.021352313167259787,
      "grad_norm": 6.31026446059828,
      "learning_rate": 9.997187911979252e-06,
      "loss": 0.1917,
      "step": 12
    },
    {
      "epoch": 0.023131672597864767,
      "grad_norm": 5.754594406513585,
      "learning_rate": 9.996699755967035e-06,
      "loss": 0.1905,
      "step": 13
    },
    {
      "epoch": 0.02491103202846975,
      "grad_norm": 4.662941591519252,
      "learning_rate": 9.996172565322375e-06,
      "loss": 0.1902,
      "step": 14
    },
    {
      "epoch": 0.026690391459074734,
      "grad_norm": 4.736439471713021,
      "learning_rate": 9.995606344163728e-06,
      "loss": 0.1944,
      "step": 15
    },
    {
      "epoch": 0.028469750889679714,
      "grad_norm": 4.486377936553121,
      "learning_rate": 9.995001096914462e-06,
      "loss": 0.2437,
      "step": 16
    },
    {
      "epoch": 0.030249110320284697,
      "grad_norm": 4.30102080096006,
      "learning_rate": 9.994356828302818e-06,
      "loss": 0.2169,
      "step": 17
    },
    {
      "epoch": 0.03202846975088968,
      "grad_norm": 3.592515779476466,
      "learning_rate": 9.993673543361874e-06,
      "loss": 0.1574,
      "step": 18
    },
    {
      "epoch": 0.033807829181494664,
      "grad_norm": 5.100817741996862,
      "learning_rate": 9.992951247429512e-06,
      "loss": 0.2682,
      "step": 19
    },
    {
      "epoch": 0.03558718861209965,
      "grad_norm": 4.371292623975572,
      "learning_rate": 9.992189946148366e-06,
      "loss": 0.1953,
      "step": 20
    },
    {
      "epoch": 0.037366548042704624,
      "grad_norm": 3.2229310712910753,
      "learning_rate": 9.991389645465786e-06,
      "loss": 0.114,
      "step": 21
    },
    {
      "epoch": 0.03914590747330961,
      "grad_norm": 4.263731304697079,
      "learning_rate": 9.990550351633784e-06,
      "loss": 0.196,
      "step": 22
    },
    {
      "epoch": 0.04092526690391459,
      "grad_norm": 3.9158262915571505,
      "learning_rate": 9.989672071208993e-06,
      "loss": 0.2118,
      "step": 23
    },
    {
      "epoch": 0.042704626334519574,
      "grad_norm": 4.617209475306654,
      "learning_rate": 9.988754811052616e-06,
      "loss": 0.2642,
      "step": 24
    },
    {
      "epoch": 0.04448398576512456,
      "grad_norm": 4.515309619371239,
      "learning_rate": 9.987798578330365e-06,
      "loss": 0.2086,
      "step": 25
    },
    {
      "epoch": 0.046263345195729534,
      "grad_norm": 4.337192727191005,
      "learning_rate": 9.986803380512406e-06,
      "loss": 0.1998,
      "step": 26
    },
    {
      "epoch": 0.04804270462633452,
      "grad_norm": 4.1219829533226795,
      "learning_rate": 9.98576922537331e-06,
      "loss": 0.2041,
      "step": 27
    },
    {
      "epoch": 0.0498220640569395,
      "grad_norm": 4.114929230143584,
      "learning_rate": 9.984696120991979e-06,
      "loss": 0.2135,
      "step": 28
    },
    {
      "epoch": 0.051601423487544484,
      "grad_norm": 4.071344469298813,
      "learning_rate": 9.983584075751598e-06,
      "loss": 0.1814,
      "step": 29
    },
    {
      "epoch": 0.05338078291814947,
      "grad_norm": 4.019620685354622,
      "learning_rate": 9.982433098339553e-06,
      "loss": 0.2013,
      "step": 30
    },
    {
      "epoch": 0.05516014234875445,
      "grad_norm": 5.957555251452121,
      "learning_rate": 9.981243197747375e-06,
      "loss": 0.2543,
      "step": 31
    },
    {
      "epoch": 0.05693950177935943,
      "grad_norm": 5.928572189081229,
      "learning_rate": 9.980014383270668e-06,
      "loss": 0.2411,
      "step": 32
    },
    {
      "epoch": 0.05871886120996441,
      "grad_norm": 4.817413291411684,
      "learning_rate": 9.978746664509032e-06,
      "loss": 0.1993,
      "step": 33
    },
    {
      "epoch": 0.060498220640569395,
      "grad_norm": 4.344728532867641,
      "learning_rate": 9.97744005136599e-06,
      "loss": 0.2218,
      "step": 34
    },
    {
      "epoch": 0.06227758007117438,
      "grad_norm": 3.848089033829962,
      "learning_rate": 9.976094554048912e-06,
      "loss": 0.222,
      "step": 35
    },
    {
      "epoch": 0.06405693950177936,
      "grad_norm": 4.858513297009362,
      "learning_rate": 9.974710183068935e-06,
      "loss": 0.2095,
      "step": 36
    },
    {
      "epoch": 0.06583629893238434,
      "grad_norm": 4.5323701076327065,
      "learning_rate": 9.97328694924088e-06,
      "loss": 0.2085,
      "step": 37
    },
    {
      "epoch": 0.06761565836298933,
      "grad_norm": 4.727847215467013,
      "learning_rate": 9.971824863683168e-06,
      "loss": 0.2459,
      "step": 38
    },
    {
      "epoch": 0.0693950177935943,
      "grad_norm": 4.575506711991273,
      "learning_rate": 9.970323937817732e-06,
      "loss": 0.199,
      "step": 39
    },
    {
      "epoch": 0.0711743772241993,
      "grad_norm": 4.578335722163164,
      "learning_rate": 9.968784183369929e-06,
      "loss": 0.1848,
      "step": 40
    },
    {
      "epoch": 0.07295373665480427,
      "grad_norm": 4.4715758661585205,
      "learning_rate": 9.96720561236845e-06,
      "loss": 0.2035,
      "step": 41
    },
    {
      "epoch": 0.07473309608540925,
      "grad_norm": 4.729299443995599,
      "learning_rate": 9.965588237145219e-06,
      "loss": 0.2582,
      "step": 42
    },
    {
      "epoch": 0.07651245551601424,
      "grad_norm": 3.6946302672420974,
      "learning_rate": 9.963932070335307e-06,
      "loss": 0.1566,
      "step": 43
    },
    {
      "epoch": 0.07829181494661921,
      "grad_norm": 4.284017266931831,
      "learning_rate": 9.962237124876828e-06,
      "loss": 0.1943,
      "step": 44
    },
    {
      "epoch": 0.0800711743772242,
      "grad_norm": 4.2907767912391845,
      "learning_rate": 9.960503414010833e-06,
      "loss": 0.2213,
      "step": 45
    },
    {
      "epoch": 0.08185053380782918,
      "grad_norm": 4.053926489163686,
      "learning_rate": 9.958730951281218e-06,
      "loss": 0.1864,
      "step": 46
    },
    {
      "epoch": 0.08362989323843416,
      "grad_norm": 4.414505617880729,
      "learning_rate": 9.956919750534607e-06,
      "loss": 0.2079,
      "step": 47
    },
    {
      "epoch": 0.08540925266903915,
      "grad_norm": 5.321000408289219,
      "learning_rate": 9.955069825920249e-06,
      "loss": 0.3046,
      "step": 48
    },
    {
      "epoch": 0.08718861209964412,
      "grad_norm": 4.7592548077016845,
      "learning_rate": 9.953181191889913e-06,
      "loss": 0.2665,
      "step": 49
    },
    {
      "epoch": 0.08896797153024912,
      "grad_norm": 4.510052721399061,
      "learning_rate": 9.95125386319776e-06,
      "loss": 0.1996,
      "step": 50
    },
    {
      "epoch": 0.09074733096085409,
      "grad_norm": 5.418267148433161,
      "learning_rate": 9.949287854900243e-06,
      "loss": 0.1999,
      "step": 51
    },
    {
      "epoch": 0.09252669039145907,
      "grad_norm": 4.163705847885295,
      "learning_rate": 9.947283182355982e-06,
      "loss": 0.1861,
      "step": 52
    },
    {
      "epoch": 0.09430604982206406,
      "grad_norm": 4.450852673501345,
      "learning_rate": 9.945239861225644e-06,
      "loss": 0.1873,
      "step": 53
    },
    {
      "epoch": 0.09608540925266904,
      "grad_norm": 5.733806666059855,
      "learning_rate": 9.943157907471825e-06,
      "loss": 0.2709,
      "step": 54
    },
    {
      "epoch": 0.09786476868327403,
      "grad_norm": 3.8025992686343866,
      "learning_rate": 9.941037337358918e-06,
      "loss": 0.186,
      "step": 55
    },
    {
      "epoch": 0.099644128113879,
      "grad_norm": 4.604147206737029,
      "learning_rate": 9.938878167452991e-06,
      "loss": 0.1925,
      "step": 56
    },
    {
      "epoch": 0.10142348754448399,
      "grad_norm": 5.534265371100669,
      "learning_rate": 9.936680414621663e-06,
      "loss": 0.2694,
      "step": 57
    },
    {
      "epoch": 0.10320284697508897,
      "grad_norm": 4.437323753211586,
      "learning_rate": 9.934444096033958e-06,
      "loss": 0.1646,
      "step": 58
    },
    {
      "epoch": 0.10498220640569395,
      "grad_norm": 4.699157472361599,
      "learning_rate": 9.932169229160183e-06,
      "loss": 0.2313,
      "step": 59
    },
    {
      "epoch": 0.10676156583629894,
      "grad_norm": 3.8102554943597404,
      "learning_rate": 9.929855831771787e-06,
      "loss": 0.1451,
      "step": 60
    },
    {
      "epoch": 0.10854092526690391,
      "grad_norm": 5.109745691574468,
      "learning_rate": 9.927503921941218e-06,
      "loss": 0.2341,
      "step": 61
    },
    {
      "epoch": 0.1103202846975089,
      "grad_norm": 4.125571123133517,
      "learning_rate": 9.925113518041796e-06,
      "loss": 0.267,
      "step": 62
    },
    {
      "epoch": 0.11209964412811388,
      "grad_norm": 5.238232473734802,
      "learning_rate": 9.922684638747551e-06,
      "loss": 0.2659,
      "step": 63
    },
    {
      "epoch": 0.11387900355871886,
      "grad_norm": 4.204364857518635,
      "learning_rate": 9.920217303033091e-06,
      "loss": 0.1791,
      "step": 64
    },
    {
      "epoch": 0.11565836298932385,
      "grad_norm": 4.467070017261026,
      "learning_rate": 9.917711530173444e-06,
      "loss": 0.2322,
      "step": 65
    },
    {
      "epoch": 0.11743772241992882,
      "grad_norm": 5.4243208062138555,
      "learning_rate": 9.91516733974392e-06,
      "loss": 0.2389,
      "step": 66
    },
    {
      "epoch": 0.11921708185053381,
      "grad_norm": 3.7638172031814983,
      "learning_rate": 9.912584751619943e-06,
      "loss": 0.1859,
      "step": 67
    },
    {
      "epoch": 0.12099644128113879,
      "grad_norm": 3.8922796601385254,
      "learning_rate": 9.909963785976902e-06,
      "loss": 0.1821,
      "step": 68
    },
    {
      "epoch": 0.12277580071174377,
      "grad_norm": 4.18972732738472,
      "learning_rate": 9.907304463290004e-06,
      "loss": 0.1999,
      "step": 69
    },
    {
      "epoch": 0.12455516014234876,
      "grad_norm": 4.302980543338975,
      "learning_rate": 9.904606804334094e-06,
      "loss": 0.1925,
      "step": 70
    },
    {
      "epoch": 0.12633451957295375,
      "grad_norm": 3.743925628918166,
      "learning_rate": 9.901870830183506e-06,
      "loss": 0.1638,
      "step": 71
    },
    {
      "epoch": 0.12811387900355872,
      "grad_norm": 3.818252110327654,
      "learning_rate": 9.899096562211902e-06,
      "loss": 0.2073,
      "step": 72
    },
    {
      "epoch": 0.1298932384341637,
      "grad_norm": 4.216600822450395,
      "learning_rate": 9.896284022092088e-06,
      "loss": 0.1852,
      "step": 73
    },
    {
      "epoch": 0.13167259786476868,
      "grad_norm": 5.810525149334552,
      "learning_rate": 9.893433231795864e-06,
      "loss": 0.265,
      "step": 74
    },
    {
      "epoch": 0.13345195729537365,
      "grad_norm": 5.188144667097759,
      "learning_rate": 9.890544213593838e-06,
      "loss": 0.2383,
      "step": 75
    },
    {
      "epoch": 0.13523131672597866,
      "grad_norm": 4.334031301696127,
      "learning_rate": 9.887616990055262e-06,
      "loss": 0.2759,
      "step": 76
    },
    {
      "epoch": 0.13701067615658363,
      "grad_norm": 5.127305952447781,
      "learning_rate": 9.884651584047845e-06,
      "loss": 0.2352,
      "step": 77
    },
    {
      "epoch": 0.1387900355871886,
      "grad_norm": 4.797471070852961,
      "learning_rate": 9.881648018737587e-06,
      "loss": 0.2476,
      "step": 78
    },
    {
      "epoch": 0.14056939501779359,
      "grad_norm": 4.217574700988118,
      "learning_rate": 9.878606317588588e-06,
      "loss": 0.1801,
      "step": 79
    },
    {
      "epoch": 0.1423487544483986,
      "grad_norm": 5.728731355296789,
      "learning_rate": 9.875526504362868e-06,
      "loss": 0.2359,
      "step": 80
    },
    {
      "epoch": 0.14412811387900357,
      "grad_norm": 4.530854404152971,
      "learning_rate": 9.872408603120187e-06,
      "loss": 0.2103,
      "step": 81
    },
    {
      "epoch": 0.14590747330960854,
      "grad_norm": 4.954781405431989,
      "learning_rate": 9.869252638217846e-06,
      "loss": 0.1954,
      "step": 82
    },
    {
      "epoch": 0.14768683274021352,
      "grad_norm": 4.158423396387564,
      "learning_rate": 9.866058634310503e-06,
      "loss": 0.1976,
      "step": 83
    },
    {
      "epoch": 0.1494661921708185,
      "grad_norm": 4.563224024612496,
      "learning_rate": 9.862826616349981e-06,
      "loss": 0.2538,
      "step": 84
    },
    {
      "epoch": 0.1512455516014235,
      "grad_norm": 5.5535906878974055,
      "learning_rate": 9.859556609585075e-06,
      "loss": 0.268,
      "step": 85
    },
    {
      "epoch": 0.15302491103202848,
      "grad_norm": 4.2238463358880205,
      "learning_rate": 9.856248639561346e-06,
      "loss": 0.197,
      "step": 86
    },
    {
      "epoch": 0.15480427046263345,
      "grad_norm": 4.594899715847846,
      "learning_rate": 9.85290273212093e-06,
      "loss": 0.2227,
      "step": 87
    },
    {
      "epoch": 0.15658362989323843,
      "grad_norm": 4.584977395679195,
      "learning_rate": 9.849518913402334e-06,
      "loss": 0.247,
      "step": 88
    },
    {
      "epoch": 0.1583629893238434,
      "grad_norm": 5.184030674064547,
      "learning_rate": 9.84609720984023e-06,
      "loss": 0.2349,
      "step": 89
    },
    {
      "epoch": 0.1601423487544484,
      "grad_norm": 3.96295162127913,
      "learning_rate": 9.84263764816525e-06,
      "loss": 0.2048,
      "step": 90
    },
    {
      "epoch": 0.1619217081850534,
      "grad_norm": 4.31758088084495,
      "learning_rate": 9.839140255403776e-06,
      "loss": 0.184,
      "step": 91
    },
    {
      "epoch": 0.16370106761565836,
      "grad_norm": 3.575166137317986,
      "learning_rate": 9.83560505887773e-06,
      "loss": 0.1644,
      "step": 92
    },
    {
      "epoch": 0.16548042704626334,
      "grad_norm": 4.097779912170179,
      "learning_rate": 9.83203208620436e-06,
      "loss": 0.2144,
      "step": 93
    },
    {
      "epoch": 0.16725978647686832,
      "grad_norm": 4.208802127005884,
      "learning_rate": 9.828421365296023e-06,
      "loss": 0.1786,
      "step": 94
    },
    {
      "epoch": 0.16903914590747332,
      "grad_norm": 5.046457082447083,
      "learning_rate": 9.824772924359974e-06,
      "loss": 0.2424,
      "step": 95
    },
    {
      "epoch": 0.1708185053380783,
      "grad_norm": 4.485659715727576,
      "learning_rate": 9.821086791898133e-06,
      "loss": 0.2266,
      "step": 96
    },
    {
      "epoch": 0.17259786476868327,
      "grad_norm": 3.55367602545185,
      "learning_rate": 9.817362996706872e-06,
      "loss": 0.1964,
      "step": 97
    },
    {
      "epoch": 0.17437722419928825,
      "grad_norm": 3.725185601492517,
      "learning_rate": 9.81360156787679e-06,
      "loss": 0.1722,
      "step": 98
    },
    {
      "epoch": 0.17615658362989323,
      "grad_norm": 3.4842158114973008,
      "learning_rate": 9.809802534792477e-06,
      "loss": 0.1645,
      "step": 99
    },
    {
      "epoch": 0.17793594306049823,
      "grad_norm": 5.159791330851206,
      "learning_rate": 9.805965927132294e-06,
      "loss": 0.2205,
      "step": 100
    },
    {
      "epoch": 0.1797153024911032,
      "grad_norm": 4.988575759812273,
      "learning_rate": 9.802091774868143e-06,
      "loss": 0.2125,
      "step": 101
    },
    {
      "epoch": 0.18149466192170818,
      "grad_norm": 3.9554160830864213,
      "learning_rate": 9.798180108265218e-06,
      "loss": 0.1995,
      "step": 102
    },
    {
      "epoch": 0.18327402135231316,
      "grad_norm": 4.249696327233751,
      "learning_rate": 9.794230957881785e-06,
      "loss": 0.1886,
      "step": 103
    },
    {
      "epoch": 0.18505338078291814,
      "grad_norm": 5.2823119355107595,
      "learning_rate": 9.79024435456893e-06,
      "loss": 0.1901,
      "step": 104
    },
    {
      "epoch": 0.18683274021352314,
      "grad_norm": 4.2301645540904556,
      "learning_rate": 9.786220329470334e-06,
      "loss": 0.2452,
      "step": 105
    },
    {
      "epoch": 0.18861209964412812,
      "grad_norm": 4.915432243559754,
      "learning_rate": 9.782158914022011e-06,
      "loss": 0.2274,
      "step": 106
    },
    {
      "epoch": 0.1903914590747331,
      "grad_norm": 3.8055053439224302,
      "learning_rate": 9.778060139952075e-06,
      "loss": 0.1612,
      "step": 107
    },
    {
      "epoch": 0.19217081850533807,
      "grad_norm": 4.560380521431433,
      "learning_rate": 9.773924039280488e-06,
      "loss": 0.2023,
      "step": 108
    },
    {
      "epoch": 0.19395017793594305,
      "grad_norm": 3.9868367762840964,
      "learning_rate": 9.769750644318814e-06,
      "loss": 0.1555,
      "step": 109
    },
    {
      "epoch": 0.19572953736654805,
      "grad_norm": 4.235987518976843,
      "learning_rate": 9.765539987669956e-06,
      "loss": 0.2091,
      "step": 110
    },
    {
      "epoch": 0.19750889679715303,
      "grad_norm": 4.752245750351429,
      "learning_rate": 9.761292102227917e-06,
      "loss": 0.2581,
      "step": 111
    },
    {
      "epoch": 0.199288256227758,
      "grad_norm": 5.415700464011365,
      "learning_rate": 9.757007021177529e-06,
      "loss": 0.2707,
      "step": 112
    },
    {
      "epoch": 0.20106761565836298,
      "grad_norm": 3.4253057183669626,
      "learning_rate": 9.752684777994197e-06,
      "loss": 0.1603,
      "step": 113
    },
    {
      "epoch": 0.20284697508896798,
      "grad_norm": 4.421608921551121,
      "learning_rate": 9.748325406443647e-06,
      "loss": 0.1885,
      "step": 114
    },
    {
      "epoch": 0.20462633451957296,
      "grad_norm": 4.705537191395626,
      "learning_rate": 9.743928940581646e-06,
      "loss": 0.2558,
      "step": 115
    },
    {
      "epoch": 0.20640569395017794,
      "grad_norm": 5.085113367398359,
      "learning_rate": 9.739495414753754e-06,
      "loss": 0.2014,
      "step": 116
    },
    {
      "epoch": 0.20818505338078291,
      "grad_norm": 3.531588223196664,
      "learning_rate": 9.73502486359504e-06,
      "loss": 0.1654,
      "step": 117
    },
    {
      "epoch": 0.2099644128113879,
      "grad_norm": 4.858658695164487,
      "learning_rate": 9.73051732202982e-06,
      "loss": 0.1946,
      "step": 118
    },
    {
      "epoch": 0.2117437722419929,
      "grad_norm": 4.149662256847569,
      "learning_rate": 9.725972825271381e-06,
      "loss": 0.1956,
      "step": 119
    },
    {
      "epoch": 0.21352313167259787,
      "grad_norm": 3.951604780762163,
      "learning_rate": 9.721391408821713e-06,
      "loss": 0.1843,
      "step": 120
    },
    {
      "epoch": 0.21530249110320285,
      "grad_norm": 4.27351047793533,
      "learning_rate": 9.716773108471213e-06,
      "loss": 0.2399,
      "step": 121
    },
    {
      "epoch": 0.21708185053380782,
      "grad_norm": 4.763430246399859,
      "learning_rate": 9.712117960298433e-06,
      "loss": 0.2331,
      "step": 122
    },
    {
      "epoch": 0.2188612099644128,
      "grad_norm": 4.435535137963621,
      "learning_rate": 9.707426000669773e-06,
      "loss": 0.1897,
      "step": 123
    },
    {
      "epoch": 0.2206405693950178,
      "grad_norm": 4.515964075661597,
      "learning_rate": 9.702697266239211e-06,
      "loss": 0.2425,
      "step": 124
    },
    {
      "epoch": 0.22241992882562278,
      "grad_norm": 4.501991218830376,
      "learning_rate": 9.697931793948012e-06,
      "loss": 0.2928,
      "step": 125
    },
    {
      "epoch": 0.22419928825622776,
      "grad_norm": 3.7514734033466963,
      "learning_rate": 9.693129621024441e-06,
      "loss": 0.2328,
      "step": 126
    },
    {
      "epoch": 0.22597864768683273,
      "grad_norm": 3.864371998998751,
      "learning_rate": 9.68829078498347e-06,
      "loss": 0.1909,
      "step": 127
    },
    {
      "epoch": 0.2277580071174377,
      "grad_norm": 4.947057701330422,
      "learning_rate": 9.683415323626487e-06,
      "loss": 0.2023,
      "step": 128
    },
    {
      "epoch": 0.22953736654804271,
      "grad_norm": 5.4919627878972195,
      "learning_rate": 9.678503275040997e-06,
      "loss": 0.2473,
      "step": 129
    },
    {
      "epoch": 0.2313167259786477,
      "grad_norm": 3.9203452593235,
      "learning_rate": 9.673554677600336e-06,
      "loss": 0.1735,
      "step": 130
    },
    {
      "epoch": 0.23309608540925267,
      "grad_norm": 4.064740024476549,
      "learning_rate": 9.668569569963355e-06,
      "loss": 0.2135,
      "step": 131
    },
    {
      "epoch": 0.23487544483985764,
      "grad_norm": 4.265140549599708,
      "learning_rate": 9.663547991074129e-06,
      "loss": 0.1659,
      "step": 132
    },
    {
      "epoch": 0.23665480427046262,
      "grad_norm": 4.406993206920881,
      "learning_rate": 9.658489980161643e-06,
      "loss": 0.1963,
      "step": 133
    },
    {
      "epoch": 0.23843416370106763,
      "grad_norm": 4.344436021865113,
      "learning_rate": 9.653395576739504e-06,
      "loss": 0.2031,
      "step": 134
    },
    {
      "epoch": 0.2402135231316726,
      "grad_norm": 4.577120739362587,
      "learning_rate": 9.648264820605611e-06,
      "loss": 0.1941,
      "step": 135
    },
    {
      "epoch": 0.24199288256227758,
      "grad_norm": 4.041450121122198,
      "learning_rate": 9.643097751841854e-06,
      "loss": 0.2595,
      "step": 136
    },
    {
      "epoch": 0.24377224199288255,
      "grad_norm": 4.016977085500883,
      "learning_rate": 9.637894410813803e-06,
      "loss": 0.2341,
      "step": 137
    },
    {
      "epoch": 0.24555160142348753,
      "grad_norm": 4.598570395014166,
      "learning_rate": 9.632654838170393e-06,
      "loss": 0.2017,
      "step": 138
    },
    {
      "epoch": 0.24733096085409254,
      "grad_norm": 4.4082260080488895,
      "learning_rate": 9.627379074843595e-06,
      "loss": 0.2057,
      "step": 139
    },
    {
      "epoch": 0.2491103202846975,
      "grad_norm": 3.939096958807172,
      "learning_rate": 9.622067162048111e-06,
      "loss": 0.1866,
      "step": 140
    },
    {
      "epoch": 0.2508896797153025,
      "grad_norm": 3.5920167076742966,
      "learning_rate": 9.616719141281044e-06,
      "loss": 0.1791,
      "step": 141
    },
    {
      "epoch": 0.2526690391459075,
      "grad_norm": 4.65351999021107,
      "learning_rate": 9.611335054321576e-06,
      "loss": 0.2276,
      "step": 142
    },
    {
      "epoch": 0.25444839857651247,
      "grad_norm": 4.0692095390500285,
      "learning_rate": 9.605914943230637e-06,
      "loss": 0.1875,
      "step": 143
    },
    {
      "epoch": 0.25622775800711745,
      "grad_norm": 4.2355191629844375,
      "learning_rate": 9.600458850350588e-06,
      "loss": 0.2271,
      "step": 144
    },
    {
      "epoch": 0.2580071174377224,
      "grad_norm": 4.276131511594283,
      "learning_rate": 9.594966818304875e-06,
      "loss": 0.2281,
      "step": 145
    },
    {
      "epoch": 0.2597864768683274,
      "grad_norm": 3.937776931772761,
      "learning_rate": 9.589438889997712e-06,
      "loss": 0.1793,
      "step": 146
    },
    {
      "epoch": 0.2615658362989324,
      "grad_norm": 3.82727698275284,
      "learning_rate": 9.583875108613727e-06,
      "loss": 0.1485,
      "step": 147
    },
    {
      "epoch": 0.26334519572953735,
      "grad_norm": 4.194240188598135,
      "learning_rate": 9.578275517617646e-06,
      "loss": 0.2147,
      "step": 148
    },
    {
      "epoch": 0.26512455516014233,
      "grad_norm": 4.460431992599825,
      "learning_rate": 9.572640160753936e-06,
      "loss": 0.2228,
      "step": 149
    },
    {
      "epoch": 0.2669039145907473,
      "grad_norm": 3.3881502841234212,
      "learning_rate": 9.566969082046471e-06,
      "loss": 0.1857,
      "step": 150
    },
    {
      "epoch": 0.26868327402135234,
      "grad_norm": 3.728908251674804,
      "learning_rate": 9.561262325798188e-06,
      "loss": 0.1656,
      "step": 151
    },
    {
      "epoch": 0.2704626334519573,
      "grad_norm": 3.357703160877988,
      "learning_rate": 9.555519936590739e-06,
      "loss": 0.1614,
      "step": 152
    },
    {
      "epoch": 0.2722419928825623,
      "grad_norm": 4.004889738489129,
      "learning_rate": 9.549741959284147e-06,
      "loss": 0.2042,
      "step": 153
    },
    {
      "epoch": 0.27402135231316727,
      "grad_norm": 4.034604388839263,
      "learning_rate": 9.543928439016445e-06,
      "loss": 0.1911,
      "step": 154
    },
    {
      "epoch": 0.27580071174377224,
      "grad_norm": 3.722371450490722,
      "learning_rate": 9.538079421203339e-06,
      "loss": 0.1693,
      "step": 155
    },
    {
      "epoch": 0.2775800711743772,
      "grad_norm": 3.9804830600170478,
      "learning_rate": 9.532194951537838e-06,
      "loss": 0.2186,
      "step": 156
    },
    {
      "epoch": 0.2793594306049822,
      "grad_norm": 4.26283512255699,
      "learning_rate": 9.52627507598991e-06,
      "loss": 0.1574,
      "step": 157
    },
    {
      "epoch": 0.28113879003558717,
      "grad_norm": 3.2386124067603577,
      "learning_rate": 9.52031984080611e-06,
      "loss": 0.1413,
      "step": 158
    },
    {
      "epoch": 0.28291814946619215,
      "grad_norm": 4.303220454966736,
      "learning_rate": 9.514329292509227e-06,
      "loss": 0.1862,
      "step": 159
    },
    {
      "epoch": 0.2846975088967972,
      "grad_norm": 5.202940276580929,
      "learning_rate": 9.508303477897925e-06,
      "loss": 0.1881,
      "step": 160
    },
    {
      "epoch": 0.28647686832740216,
      "grad_norm": 5.280976795911324,
      "learning_rate": 9.502242444046365e-06,
      "loss": 0.2289,
      "step": 161
    },
    {
      "epoch": 0.28825622775800713,
      "grad_norm": 4.650656600764109,
      "learning_rate": 9.496146238303846e-06,
      "loss": 0.1883,
      "step": 162
    },
    {
      "epoch": 0.2900355871886121,
      "grad_norm": 4.358663207590764,
      "learning_rate": 9.49001490829443e-06,
      "loss": 0.1729,
      "step": 163
    },
    {
      "epoch": 0.2918149466192171,
      "grad_norm": 5.655679889599393,
      "learning_rate": 9.483848501916578e-06,
      "loss": 0.2828,
      "step": 164
    },
    {
      "epoch": 0.29359430604982206,
      "grad_norm": 4.88994197941077,
      "learning_rate": 9.477647067342766e-06,
      "loss": 0.2251,
      "step": 165
    },
    {
      "epoch": 0.29537366548042704,
      "grad_norm": 4.811141004065822,
      "learning_rate": 9.471410653019115e-06,
      "loss": 0.1882,
      "step": 166
    },
    {
      "epoch": 0.297153024911032,
      "grad_norm": 4.610159000614841,
      "learning_rate": 9.46513930766501e-06,
      "loss": 0.197,
      "step": 167
    },
    {
      "epoch": 0.298932384341637,
      "grad_norm": 4.321423960587968,
      "learning_rate": 9.458833080272723e-06,
      "loss": 0.1977,
      "step": 168
    },
    {
      "epoch": 0.30071174377224197,
      "grad_norm": 4.736304802512031,
      "learning_rate": 9.45249202010702e-06,
      "loss": 0.1795,
      "step": 169
    },
    {
      "epoch": 0.302491103202847,
      "grad_norm": 4.646917500635698,
      "learning_rate": 9.446116176704791e-06,
      "loss": 0.2385,
      "step": 170
    },
    {
      "epoch": 0.304270462633452,
      "grad_norm": 4.599050329975436,
      "learning_rate": 9.439705599874653e-06,
      "loss": 0.2344,
      "step": 171
    },
    {
      "epoch": 0.30604982206405695,
      "grad_norm": 4.067723593037202,
      "learning_rate": 9.433260339696564e-06,
      "loss": 0.2088,
      "step": 172
    },
    {
      "epoch": 0.30782918149466193,
      "grad_norm": 4.2580921423640685,
      "learning_rate": 9.426780446521429e-06,
      "loss": 0.236,
      "step": 173
    },
    {
      "epoch": 0.3096085409252669,
      "grad_norm": 3.604677560960492,
      "learning_rate": 9.42026597097071e-06,
      "loss": 0.2368,
      "step": 174
    },
    {
      "epoch": 0.3113879003558719,
      "grad_norm": 3.1396325634667774,
      "learning_rate": 9.413716963936033e-06,
      "loss": 0.1772,
      "step": 175
    },
    {
      "epoch": 0.31316725978647686,
      "grad_norm": 3.2760580966094834,
      "learning_rate": 9.407133476578778e-06,
      "loss": 0.1478,
      "step": 176
    },
    {
      "epoch": 0.31494661921708184,
      "grad_norm": 4.377224357825041,
      "learning_rate": 9.400515560329698e-06,
      "loss": 0.2656,
      "step": 177
    },
    {
      "epoch": 0.3167259786476868,
      "grad_norm": 4.235460499202126,
      "learning_rate": 9.393863266888501e-06,
      "loss": 0.1865,
      "step": 178
    },
    {
      "epoch": 0.3185053380782918,
      "grad_norm": 4.874782939039627,
      "learning_rate": 9.387176648223457e-06,
      "loss": 0.2357,
      "step": 179
    },
    {
      "epoch": 0.3202846975088968,
      "grad_norm": 5.724770204976305,
      "learning_rate": 9.38045575657098e-06,
      "loss": 0.2686,
      "step": 180
    },
    {
      "epoch": 0.3220640569395018,
      "grad_norm": 4.584854534779475,
      "learning_rate": 9.37370064443524e-06,
      "loss": 0.1793,
      "step": 181
    },
    {
      "epoch": 0.3238434163701068,
      "grad_norm": 3.3346334425699844,
      "learning_rate": 9.366911364587726e-06,
      "loss": 0.1601,
      "step": 182
    },
    {
      "epoch": 0.32562277580071175,
      "grad_norm": 4.004407393119814,
      "learning_rate": 9.360087970066854e-06,
      "loss": 0.1667,
      "step": 183
    },
    {
      "epoch": 0.3274021352313167,
      "grad_norm": 4.132619374509711,
      "learning_rate": 9.353230514177553e-06,
      "loss": 0.1661,
      "step": 184
    },
    {
      "epoch": 0.3291814946619217,
      "grad_norm": 3.8700631991321375,
      "learning_rate": 9.346339050490832e-06,
      "loss": 0.2178,
      "step": 185
    },
    {
      "epoch": 0.3309608540925267,
      "grad_norm": 4.5030841246316085,
      "learning_rate": 9.33941363284338e-06,
      "loss": 0.1951,
      "step": 186
    },
    {
      "epoch": 0.33274021352313166,
      "grad_norm": 4.12230226751847,
      "learning_rate": 9.332454315337129e-06,
      "loss": 0.1934,
      "step": 187
    },
    {
      "epoch": 0.33451957295373663,
      "grad_norm": 4.511652192208775,
      "learning_rate": 9.325461152338846e-06,
      "loss": 0.1982,
      "step": 188
    },
    {
      "epoch": 0.33629893238434166,
      "grad_norm": 4.303877298404574,
      "learning_rate": 9.3184341984797e-06,
      "loss": 0.2137,
      "step": 189
    },
    {
      "epoch": 0.33807829181494664,
      "grad_norm": 4.310094110882736,
      "learning_rate": 9.311373508654838e-06,
      "loss": 0.1788,
      "step": 190
    },
    {
      "epoch": 0.3398576512455516,
      "grad_norm": 4.919934175232768,
      "learning_rate": 9.30427913802295e-06,
      "loss": 0.2381,
      "step": 191
    },
    {
      "epoch": 0.3416370106761566,
      "grad_norm": 4.538088441646977,
      "learning_rate": 9.297151142005852e-06,
      "loss": 0.214,
      "step": 192
    },
    {
      "epoch": 0.34341637010676157,
      "grad_norm": 3.7743339918514125,
      "learning_rate": 9.289989576288035e-06,
      "loss": 0.172,
      "step": 193
    },
    {
      "epoch": 0.34519572953736655,
      "grad_norm": 5.1586500269119355,
      "learning_rate": 9.282794496816244e-06,
      "loss": 0.2784,
      "step": 194
    },
    {
      "epoch": 0.3469750889679715,
      "grad_norm": 5.12186047434381,
      "learning_rate": 9.27556595979904e-06,
      "loss": 0.1948,
      "step": 195
    },
    {
      "epoch": 0.3487544483985765,
      "grad_norm": 4.535696118952801,
      "learning_rate": 9.26830402170635e-06,
      "loss": 0.2343,
      "step": 196
    },
    {
      "epoch": 0.3505338078291815,
      "grad_norm": 4.108826644638565,
      "learning_rate": 9.261008739269035e-06,
      "loss": 0.2225,
      "step": 197
    },
    {
      "epoch": 0.35231316725978645,
      "grad_norm": 3.9924045693017307,
      "learning_rate": 9.253680169478448e-06,
      "loss": 0.1898,
      "step": 198
    },
    {
      "epoch": 0.3540925266903915,
      "grad_norm": 3.9701071807766555,
      "learning_rate": 9.246318369585983e-06,
      "loss": 0.1766,
      "step": 199
    },
    {
      "epoch": 0.35587188612099646,
      "grad_norm": 4.955635311625639,
      "learning_rate": 9.238923397102629e-06,
      "loss": 0.2399,
      "step": 200
    },
    {
      "epoch": 0.35587188612099646,
      "eval_loss": 0.24725185334682465,
      "eval_runtime": 1.5589,
      "eval_samples_per_second": 29.507,
      "eval_steps_per_second": 7.698,
      "step": 200
    },
    {
      "epoch": 0.35765124555160144,
      "grad_norm": 3.300242485587473,
      "learning_rate": 9.231495309798525e-06,
      "loss": 0.1405,
      "step": 201
    },
    {
      "epoch": 0.3594306049822064,
      "grad_norm": 3.8823029088763135,
      "learning_rate": 9.224034165702506e-06,
      "loss": 0.1915,
      "step": 202
    },
    {
      "epoch": 0.3612099644128114,
      "grad_norm": 2.9762912413550735,
      "learning_rate": 9.216540023101646e-06,
      "loss": 0.1772,
      "step": 203
    },
    {
      "epoch": 0.36298932384341637,
      "grad_norm": 4.149641411773965,
      "learning_rate": 9.209012940540806e-06,
      "loss": 0.2453,
      "step": 204
    },
    {
      "epoch": 0.36476868327402134,
      "grad_norm": 3.9158381848436297,
      "learning_rate": 9.20145297682218e-06,
      "loss": 0.2082,
      "step": 205
    },
    {
      "epoch": 0.3665480427046263,
      "grad_norm": 5.159097616016432,
      "learning_rate": 9.193860191004833e-06,
      "loss": 0.2743,
      "step": 206
    },
    {
      "epoch": 0.3683274021352313,
      "grad_norm": 4.26683213415737,
      "learning_rate": 9.186234642404234e-06,
      "loss": 0.2305,
      "step": 207
    },
    {
      "epoch": 0.3701067615658363,
      "grad_norm": 4.906525810146449,
      "learning_rate": 9.178576390591803e-06,
      "loss": 0.2272,
      "step": 208
    },
    {
      "epoch": 0.3718861209964413,
      "grad_norm": 3.6719703777256276,
      "learning_rate": 9.170885495394435e-06,
      "loss": 0.1858,
      "step": 209
    },
    {
      "epoch": 0.3736654804270463,
      "grad_norm": 4.7426482103753935,
      "learning_rate": 9.16316201689404e-06,
      "loss": 0.2322,
      "step": 210
    },
    {
      "epoch": 0.37544483985765126,
      "grad_norm": 4.602667230613287,
      "learning_rate": 9.155406015427076e-06,
      "loss": 0.1904,
      "step": 211
    },
    {
      "epoch": 0.37722419928825623,
      "grad_norm": 4.393764728089408,
      "learning_rate": 9.147617551584066e-06,
      "loss": 0.2147,
      "step": 212
    },
    {
      "epoch": 0.3790035587188612,
      "grad_norm": 3.6605483753726102,
      "learning_rate": 9.139796686209135e-06,
      "loss": 0.2209,
      "step": 213
    },
    {
      "epoch": 0.3807829181494662,
      "grad_norm": 3.666976846905609,
      "learning_rate": 9.131943480399531e-06,
      "loss": 0.1846,
      "step": 214
    },
    {
      "epoch": 0.38256227758007116,
      "grad_norm": 4.026113784933919,
      "learning_rate": 9.124057995505148e-06,
      "loss": 0.1898,
      "step": 215
    },
    {
      "epoch": 0.38434163701067614,
      "grad_norm": 6.467409657688619,
      "learning_rate": 9.11614029312805e-06,
      "loss": 0.3174,
      "step": 216
    },
    {
      "epoch": 0.3861209964412811,
      "grad_norm": 4.710630840976778,
      "learning_rate": 9.108190435121982e-06,
      "loss": 0.2237,
      "step": 217
    },
    {
      "epoch": 0.3879003558718861,
      "grad_norm": 4.388002651551463,
      "learning_rate": 9.100208483591892e-06,
      "loss": 0.1877,
      "step": 218
    },
    {
      "epoch": 0.3896797153024911,
      "grad_norm": 4.720883787941838,
      "learning_rate": 9.092194500893448e-06,
      "loss": 0.2397,
      "step": 219
    },
    {
      "epoch": 0.3914590747330961,
      "grad_norm": 4.142151835876762,
      "learning_rate": 9.084148549632547e-06,
      "loss": 0.1935,
      "step": 220
    },
    {
      "epoch": 0.3932384341637011,
      "grad_norm": 4.477432089370019,
      "learning_rate": 9.076070692664827e-06,
      "loss": 0.2379,
      "step": 221
    },
    {
      "epoch": 0.39501779359430605,
      "grad_norm": 4.40851233992806,
      "learning_rate": 9.067960993095176e-06,
      "loss": 0.1992,
      "step": 222
    },
    {
      "epoch": 0.39679715302491103,
      "grad_norm": 4.309806554805521,
      "learning_rate": 9.059819514277238e-06,
      "loss": 0.2202,
      "step": 223
    },
    {
      "epoch": 0.398576512455516,
      "grad_norm": 3.6174066394799183,
      "learning_rate": 9.05164631981292e-06,
      "loss": 0.1721,
      "step": 224
    },
    {
      "epoch": 0.400355871886121,
      "grad_norm": 3.8906408017314646,
      "learning_rate": 9.043441473551893e-06,
      "loss": 0.2138,
      "step": 225
    },
    {
      "epoch": 0.40213523131672596,
      "grad_norm": 4.275332116455088,
      "learning_rate": 9.035205039591099e-06,
      "loss": 0.197,
      "step": 226
    },
    {
      "epoch": 0.40391459074733094,
      "grad_norm": 5.191616725728473,
      "learning_rate": 9.02693708227424e-06,
      "loss": 0.308,
      "step": 227
    },
    {
      "epoch": 0.40569395017793597,
      "grad_norm": 3.2809899001655563,
      "learning_rate": 9.018637666191284e-06,
      "loss": 0.2095,
      "step": 228
    },
    {
      "epoch": 0.40747330960854095,
      "grad_norm": 4.351544712400181,
      "learning_rate": 9.010306856177958e-06,
      "loss": 0.2369,
      "step": 229
    },
    {
      "epoch": 0.4092526690391459,
      "grad_norm": 5.074643399043796,
      "learning_rate": 9.001944717315236e-06,
      "loss": 0.2192,
      "step": 230
    },
    {
      "epoch": 0.4110320284697509,
      "grad_norm": 4.7200757679853655,
      "learning_rate": 8.993551314928846e-06,
      "loss": 0.2569,
      "step": 231
    },
    {
      "epoch": 0.4128113879003559,
      "grad_norm": 3.510561702805233,
      "learning_rate": 8.985126714588739e-06,
      "loss": 0.1531,
      "step": 232
    },
    {
      "epoch": 0.41459074733096085,
      "grad_norm": 3.723203602255659,
      "learning_rate": 8.976670982108591e-06,
      "loss": 0.1974,
      "step": 233
    },
    {
      "epoch": 0.41637010676156583,
      "grad_norm": 4.595766268109947,
      "learning_rate": 8.968184183545285e-06,
      "loss": 0.2243,
      "step": 234
    },
    {
      "epoch": 0.4181494661921708,
      "grad_norm": 4.887558142772728,
      "learning_rate": 8.959666385198396e-06,
      "loss": 0.2137,
      "step": 235
    },
    {
      "epoch": 0.4199288256227758,
      "grad_norm": 3.7954300790127595,
      "learning_rate": 8.951117653609666e-06,
      "loss": 0.172,
      "step": 236
    },
    {
      "epoch": 0.42170818505338076,
      "grad_norm": 4.20229797354761,
      "learning_rate": 8.9425380555625e-06,
      "loss": 0.1649,
      "step": 237
    },
    {
      "epoch": 0.4234875444839858,
      "grad_norm": 3.0264107194511363,
      "learning_rate": 8.933927658081423e-06,
      "loss": 0.1372,
      "step": 238
    },
    {
      "epoch": 0.42526690391459077,
      "grad_norm": 3.6257043190299383,
      "learning_rate": 8.925286528431578e-06,
      "loss": 0.1934,
      "step": 239
    },
    {
      "epoch": 0.42704626334519574,
      "grad_norm": 4.529221445887275,
      "learning_rate": 8.916614734118184e-06,
      "loss": 0.1679,
      "step": 240
    },
    {
      "epoch": 0.4288256227758007,
      "grad_norm": 3.808519208054077,
      "learning_rate": 8.907912342886016e-06,
      "loss": 0.1872,
      "step": 241
    },
    {
      "epoch": 0.4306049822064057,
      "grad_norm": 4.379219377312406,
      "learning_rate": 8.899179422718877e-06,
      "loss": 0.2179,
      "step": 242
    },
    {
      "epoch": 0.43238434163701067,
      "grad_norm": 4.239983130712202,
      "learning_rate": 8.890416041839061e-06,
      "loss": 0.194,
      "step": 243
    },
    {
      "epoch": 0.43416370106761565,
      "grad_norm": 3.5721580793948404,
      "learning_rate": 8.881622268706825e-06,
      "loss": 0.193,
      "step": 244
    },
    {
      "epoch": 0.4359430604982206,
      "grad_norm": 3.693710858498779,
      "learning_rate": 8.872798172019856e-06,
      "loss": 0.1911,
      "step": 245
    },
    {
      "epoch": 0.4377224199288256,
      "grad_norm": 3.8798409546253243,
      "learning_rate": 8.863943820712726e-06,
      "loss": 0.1775,
      "step": 246
    },
    {
      "epoch": 0.4395017793594306,
      "grad_norm": 3.893578166822683,
      "learning_rate": 8.855059283956363e-06,
      "loss": 0.2151,
      "step": 247
    },
    {
      "epoch": 0.4412811387900356,
      "grad_norm": 4.145701697456548,
      "learning_rate": 8.8461446311575e-06,
      "loss": 0.2182,
      "step": 248
    },
    {
      "epoch": 0.4430604982206406,
      "grad_norm": 3.9741561440051654,
      "learning_rate": 8.837199931958147e-06,
      "loss": 0.1685,
      "step": 249
    },
    {
      "epoch": 0.44483985765124556,
      "grad_norm": 3.75840443590278,
      "learning_rate": 8.828225256235035e-06,
      "loss": 0.2082,
      "step": 250
    },
    {
      "epoch": 0.44661921708185054,
      "grad_norm": 4.514333175265878,
      "learning_rate": 8.819220674099074e-06,
      "loss": 0.1774,
      "step": 251
    },
    {
      "epoch": 0.4483985765124555,
      "grad_norm": 3.9910076047900698,
      "learning_rate": 8.810186255894804e-06,
      "loss": 0.2151,
      "step": 252
    },
    {
      "epoch": 0.4501779359430605,
      "grad_norm": 4.242807182049356,
      "learning_rate": 8.801122072199848e-06,
      "loss": 0.1879,
      "step": 253
    },
    {
      "epoch": 0.45195729537366547,
      "grad_norm": 4.067309391648198,
      "learning_rate": 8.792028193824364e-06,
      "loss": 0.1992,
      "step": 254
    },
    {
      "epoch": 0.45373665480427045,
      "grad_norm": 4.244087026774724,
      "learning_rate": 8.782904691810478e-06,
      "loss": 0.1812,
      "step": 255
    },
    {
      "epoch": 0.4555160142348754,
      "grad_norm": 3.959043799817794,
      "learning_rate": 8.77375163743175e-06,
      "loss": 0.152,
      "step": 256
    },
    {
      "epoch": 0.45729537366548045,
      "grad_norm": 3.831375118044189,
      "learning_rate": 8.764569102192593e-06,
      "loss": 0.2093,
      "step": 257
    },
    {
      "epoch": 0.45907473309608543,
      "grad_norm": 3.7759019410390726,
      "learning_rate": 8.755357157827735e-06,
      "loss": 0.1715,
      "step": 258
    },
    {
      "epoch": 0.4608540925266904,
      "grad_norm": 4.808819365721088,
      "learning_rate": 8.746115876301651e-06,
      "loss": 0.2367,
      "step": 259
    },
    {
      "epoch": 0.4626334519572954,
      "grad_norm": 4.011816266710751,
      "learning_rate": 8.736845329807994e-06,
      "loss": 0.2019,
      "step": 260
    },
    {
      "epoch": 0.46441281138790036,
      "grad_norm": 4.1587626949273755,
      "learning_rate": 8.727545590769044e-06,
      "loss": 0.2104,
      "step": 261
    },
    {
      "epoch": 0.46619217081850534,
      "grad_norm": 4.542677204323108,
      "learning_rate": 8.718216731835131e-06,
      "loss": 0.2725,
      "step": 262
    },
    {
      "epoch": 0.4679715302491103,
      "grad_norm": 3.922072031364713,
      "learning_rate": 8.708858825884075e-06,
      "loss": 0.2334,
      "step": 263
    },
    {
      "epoch": 0.4697508896797153,
      "grad_norm": 3.9732660398364863,
      "learning_rate": 8.699471946020612e-06,
      "loss": 0.1965,
      "step": 264
    },
    {
      "epoch": 0.47153024911032027,
      "grad_norm": 4.2394480183127055,
      "learning_rate": 8.690056165575825e-06,
      "loss": 0.1974,
      "step": 265
    },
    {
      "epoch": 0.47330960854092524,
      "grad_norm": 3.945115261188095,
      "learning_rate": 8.680611558106571e-06,
      "loss": 0.1999,
      "step": 266
    },
    {
      "epoch": 0.4750889679715303,
      "grad_norm": 4.364412596723452,
      "learning_rate": 8.671138197394907e-06,
      "loss": 0.203,
      "step": 267
    },
    {
      "epoch": 0.47686832740213525,
      "grad_norm": 3.0179318002801705,
      "learning_rate": 8.661636157447511e-06,
      "loss": 0.1677,
      "step": 268
    },
    {
      "epoch": 0.4786476868327402,
      "grad_norm": 5.011135989922916,
      "learning_rate": 8.652105512495106e-06,
      "loss": 0.1972,
      "step": 269
    },
    {
      "epoch": 0.4804270462633452,
      "grad_norm": 3.7249324302900737,
      "learning_rate": 8.64254633699188e-06,
      "loss": 0.1529,
      "step": 270
    },
    {
      "epoch": 0.4822064056939502,
      "grad_norm": 4.107762798124349,
      "learning_rate": 8.632958705614905e-06,
      "loss": 0.2019,
      "step": 271
    },
    {
      "epoch": 0.48398576512455516,
      "grad_norm": 3.979895961788464,
      "learning_rate": 8.623342693263549e-06,
      "loss": 0.1658,
      "step": 272
    },
    {
      "epoch": 0.48576512455516013,
      "grad_norm": 4.435362744242753,
      "learning_rate": 8.6136983750589e-06,
      "loss": 0.216,
      "step": 273
    },
    {
      "epoch": 0.4875444839857651,
      "grad_norm": 4.019529119958633,
      "learning_rate": 8.604025826343167e-06,
      "loss": 0.1939,
      "step": 274
    },
    {
      "epoch": 0.4893238434163701,
      "grad_norm": 4.186309809913972,
      "learning_rate": 8.594325122679107e-06,
      "loss": 0.1797,
      "step": 275
    },
    {
      "epoch": 0.49110320284697506,
      "grad_norm": 3.9400362068238772,
      "learning_rate": 8.584596339849419e-06,
      "loss": 0.2367,
      "step": 276
    },
    {
      "epoch": 0.4928825622775801,
      "grad_norm": 3.4534511141182906,
      "learning_rate": 8.574839553856157e-06,
      "loss": 0.1748,
      "step": 277
    },
    {
      "epoch": 0.49466192170818507,
      "grad_norm": 4.77710632169166,
      "learning_rate": 8.565054840920145e-06,
      "loss": 0.2478,
      "step": 278
    },
    {
      "epoch": 0.49644128113879005,
      "grad_norm": 3.6893615392895076,
      "learning_rate": 8.55524227748037e-06,
      "loss": 0.1722,
      "step": 279
    },
    {
      "epoch": 0.498220640569395,
      "grad_norm": 3.7516667092356433,
      "learning_rate": 8.545401940193392e-06,
      "loss": 0.1504,
      "step": 280
    },
    {
      "epoch": 0.5,
      "grad_norm": 3.9636149628465187,
      "learning_rate": 8.535533905932739e-06,
      "loss": 0.1928,
      "step": 281
    },
    {
      "epoch": 0.501779359430605,
      "grad_norm": 5.205713752439229,
      "learning_rate": 8.525638251788312e-06,
      "loss": 0.2625,
      "step": 282
    },
    {
      "epoch": 0.50355871886121,
      "grad_norm": 3.6514145749401785,
      "learning_rate": 8.515715055065783e-06,
      "loss": 0.2092,
      "step": 283
    },
    {
      "epoch": 0.505338078291815,
      "grad_norm": 3.8034449023590895,
      "learning_rate": 8.505764393285985e-06,
      "loss": 0.204,
      "step": 284
    },
    {
      "epoch": 0.5071174377224199,
      "grad_norm": 5.244530723051715,
      "learning_rate": 8.495786344184314e-06,
      "loss": 0.254,
      "step": 285
    },
    {
      "epoch": 0.5088967971530249,
      "grad_norm": 4.300881310666334,
      "learning_rate": 8.485780985710113e-06,
      "loss": 0.1865,
      "step": 286
    },
    {
      "epoch": 0.5106761565836299,
      "grad_norm": 3.627386627507635,
      "learning_rate": 8.475748396026074e-06,
      "loss": 0.1855,
      "step": 287
    },
    {
      "epoch": 0.5124555160142349,
      "grad_norm": 4.487984049595306,
      "learning_rate": 8.46568865350762e-06,
      "loss": 0.1918,
      "step": 288
    },
    {
      "epoch": 0.5142348754448398,
      "grad_norm": 4.919728744700227,
      "learning_rate": 8.45560183674229e-06,
      "loss": 0.2191,
      "step": 289
    },
    {
      "epoch": 0.5160142348754448,
      "grad_norm": 4.1888004523174995,
      "learning_rate": 8.445488024529133e-06,
      "loss": 0.2085,
      "step": 290
    },
    {
      "epoch": 0.5177935943060499,
      "grad_norm": 4.082382445796131,
      "learning_rate": 8.435347295878087e-06,
      "loss": 0.1956,
      "step": 291
    },
    {
      "epoch": 0.5195729537366548,
      "grad_norm": 3.5276243451187637,
      "learning_rate": 8.425179730009368e-06,
      "loss": 0.1781,
      "step": 292
    },
    {
      "epoch": 0.5213523131672598,
      "grad_norm": 3.656087805507791,
      "learning_rate": 8.41498540635284e-06,
      "loss": 0.1909,
      "step": 293
    },
    {
      "epoch": 0.5231316725978647,
      "grad_norm": 4.294800624714889,
      "learning_rate": 8.404764404547404e-06,
      "loss": 0.1791,
      "step": 294
    },
    {
      "epoch": 0.5249110320284698,
      "grad_norm": 6.113739070401009,
      "learning_rate": 8.394516804440374e-06,
      "loss": 0.272,
      "step": 295
    },
    {
      "epoch": 0.5266903914590747,
      "grad_norm": 4.867977169856942,
      "learning_rate": 8.384242686086848e-06,
      "loss": 0.2439,
      "step": 296
    },
    {
      "epoch": 0.5284697508896797,
      "grad_norm": 4.447032697896748,
      "learning_rate": 8.373942129749094e-06,
      "loss": 0.2164,
      "step": 297
    },
    {
      "epoch": 0.5302491103202847,
      "grad_norm": 2.9509355261470693,
      "learning_rate": 8.363615215895908e-06,
      "loss": 0.1634,
      "step": 298
    },
    {
      "epoch": 0.5320284697508897,
      "grad_norm": 4.450260153222401,
      "learning_rate": 8.353262025202e-06,
      "loss": 0.1857,
      "step": 299
    },
    {
      "epoch": 0.5338078291814946,
      "grad_norm": 3.863692388140676,
      "learning_rate": 8.342882638547351e-06,
      "loss": 0.1703,
      "step": 300
    },
    {
      "epoch": 0.5355871886120996,
      "grad_norm": 3.3240363423711385,
      "learning_rate": 8.332477137016587e-06,
      "loss": 0.1479,
      "step": 301
    },
    {
      "epoch": 0.5373665480427047,
      "grad_norm": 3.685329845518728,
      "learning_rate": 8.322045601898354e-06,
      "loss": 0.2324,
      "step": 302
    },
    {
      "epoch": 0.5391459074733096,
      "grad_norm": 3.934899091138423,
      "learning_rate": 8.311588114684665e-06,
      "loss": 0.1899,
      "step": 303
    },
    {
      "epoch": 0.5409252669039146,
      "grad_norm": 4.517392751872078,
      "learning_rate": 8.301104757070276e-06,
      "loss": 0.3109,
      "step": 304
    },
    {
      "epoch": 0.5427046263345195,
      "grad_norm": 3.96543656947418,
      "learning_rate": 8.290595610952045e-06,
      "loss": 0.1736,
      "step": 305
    },
    {
      "epoch": 0.5444839857651246,
      "grad_norm": 4.257327547184006,
      "learning_rate": 8.280060758428294e-06,
      "loss": 0.2156,
      "step": 306
    },
    {
      "epoch": 0.5462633451957295,
      "grad_norm": 3.5513570430807273,
      "learning_rate": 8.269500281798164e-06,
      "loss": 0.1476,
      "step": 307
    },
    {
      "epoch": 0.5480427046263345,
      "grad_norm": 4.101712417482041,
      "learning_rate": 8.258914263560971e-06,
      "loss": 0.1841,
      "step": 308
    },
    {
      "epoch": 0.5498220640569395,
      "grad_norm": 3.6446595373114983,
      "learning_rate": 8.248302786415567e-06,
      "loss": 0.1741,
      "step": 309
    },
    {
      "epoch": 0.5516014234875445,
      "grad_norm": 4.420412460372305,
      "learning_rate": 8.237665933259693e-06,
      "loss": 0.1994,
      "step": 310
    },
    {
      "epoch": 0.5533807829181495,
      "grad_norm": 4.332003328742741,
      "learning_rate": 8.227003787189323e-06,
      "loss": 0.209,
      "step": 311
    },
    {
      "epoch": 0.5551601423487544,
      "grad_norm": 3.405159848450852,
      "learning_rate": 8.216316431498028e-06,
      "loss": 0.1467,
      "step": 312
    },
    {
      "epoch": 0.5569395017793595,
      "grad_norm": 3.814127808192839,
      "learning_rate": 8.205603949676317e-06,
      "loss": 0.2536,
      "step": 313
    },
    {
      "epoch": 0.5587188612099644,
      "grad_norm": 3.5369050607805286,
      "learning_rate": 8.194866425410984e-06,
      "loss": 0.21,
      "step": 314
    },
    {
      "epoch": 0.5604982206405694,
      "grad_norm": 4.846395482050443,
      "learning_rate": 8.184103942584456e-06,
      "loss": 0.2111,
      "step": 315
    },
    {
      "epoch": 0.5622775800711743,
      "grad_norm": 4.270852741539014,
      "learning_rate": 8.173316585274144e-06,
      "loss": 0.2038,
      "step": 316
    },
    {
      "epoch": 0.5640569395017794,
      "grad_norm": 5.346203775390912,
      "learning_rate": 8.162504437751775e-06,
      "loss": 0.2951,
      "step": 317
    },
    {
      "epoch": 0.5658362989323843,
      "grad_norm": 3.9308013146565433,
      "learning_rate": 8.151667584482742e-06,
      "loss": 0.1688,
      "step": 318
    },
    {
      "epoch": 0.5676156583629893,
      "grad_norm": 4.3898903002456215,
      "learning_rate": 8.140806110125442e-06,
      "loss": 0.1921,
      "step": 319
    },
    {
      "epoch": 0.5693950177935944,
      "grad_norm": 3.629111106804239,
      "learning_rate": 8.129920099530608e-06,
      "loss": 0.2207,
      "step": 320
    },
    {
      "epoch": 0.5711743772241993,
      "grad_norm": 3.1019459499162427,
      "learning_rate": 8.119009637740663e-06,
      "loss": 0.1305,
      "step": 321
    },
    {
      "epoch": 0.5729537366548043,
      "grad_norm": 3.3450238612119088,
      "learning_rate": 8.108074809989032e-06,
      "loss": 0.1721,
      "step": 322
    },
    {
      "epoch": 0.5747330960854092,
      "grad_norm": 4.203075897166166,
      "learning_rate": 8.097115701699498e-06,
      "loss": 0.1556,
      "step": 323
    },
    {
      "epoch": 0.5765124555160143,
      "grad_norm": 4.283287693790533,
      "learning_rate": 8.086132398485525e-06,
      "loss": 0.2392,
      "step": 324
    },
    {
      "epoch": 0.5782918149466192,
      "grad_norm": 2.4854199054935973,
      "learning_rate": 8.075124986149583e-06,
      "loss": 0.1322,
      "step": 325
    },
    {
      "epoch": 0.5800711743772242,
      "grad_norm": 4.294372022311221,
      "learning_rate": 8.064093550682494e-06,
      "loss": 0.217,
      "step": 326
    },
    {
      "epoch": 0.5818505338078291,
      "grad_norm": 3.74806126773062,
      "learning_rate": 8.053038178262742e-06,
      "loss": 0.1728,
      "step": 327
    },
    {
      "epoch": 0.5836298932384342,
      "grad_norm": 4.126557456890044,
      "learning_rate": 8.041958955255815e-06,
      "loss": 0.1714,
      "step": 328
    },
    {
      "epoch": 0.5854092526690391,
      "grad_norm": 3.7525474596180386,
      "learning_rate": 8.030855968213518e-06,
      "loss": 0.1694,
      "step": 329
    },
    {
      "epoch": 0.5871886120996441,
      "grad_norm": 4.527359413036257,
      "learning_rate": 8.019729303873307e-06,
      "loss": 0.2393,
      "step": 330
    },
    {
      "epoch": 0.5889679715302492,
      "grad_norm": 3.771283994113975,
      "learning_rate": 8.008579049157607e-06,
      "loss": 0.1667,
      "step": 331
    },
    {
      "epoch": 0.5907473309608541,
      "grad_norm": 3.8929343796906717,
      "learning_rate": 7.99740529117313e-06,
      "loss": 0.1978,
      "step": 332
    },
    {
      "epoch": 0.5925266903914591,
      "grad_norm": 3.9460626989582277,
      "learning_rate": 7.986208117210198e-06,
      "loss": 0.2014,
      "step": 333
    },
    {
      "epoch": 0.594306049822064,
      "grad_norm": 4.153856141472539,
      "learning_rate": 7.974987614742066e-06,
      "loss": 0.1845,
      "step": 334
    },
    {
      "epoch": 0.5960854092526691,
      "grad_norm": 4.4298775270886575,
      "learning_rate": 7.963743871424224e-06,
      "loss": 0.1992,
      "step": 335
    },
    {
      "epoch": 0.597864768683274,
      "grad_norm": 4.524178217793219,
      "learning_rate": 7.952476975093729e-06,
      "loss": 0.1746,
      "step": 336
    },
    {
      "epoch": 0.599644128113879,
      "grad_norm": 4.568351569969921,
      "learning_rate": 7.941187013768508e-06,
      "loss": 0.2243,
      "step": 337
    },
    {
      "epoch": 0.6014234875444839,
      "grad_norm": 3.8270537849368886,
      "learning_rate": 7.929874075646673e-06,
      "loss": 0.1811,
      "step": 338
    },
    {
      "epoch": 0.603202846975089,
      "grad_norm": 4.578441256915849,
      "learning_rate": 7.918538249105835e-06,
      "loss": 0.1774,
      "step": 339
    },
    {
      "epoch": 0.604982206405694,
      "grad_norm": 4.781351872240773,
      "learning_rate": 7.907179622702409e-06,
      "loss": 0.1693,
      "step": 340
    },
    {
      "epoch": 0.6067615658362989,
      "grad_norm": 3.8229499441517585,
      "learning_rate": 7.895798285170927e-06,
      "loss": 0.1999,
      "step": 341
    },
    {
      "epoch": 0.608540925266904,
      "grad_norm": 3.755976620077978,
      "learning_rate": 7.88439432542334e-06,
      "loss": 0.1761,
      "step": 342
    },
    {
      "epoch": 0.6103202846975089,
      "grad_norm": 3.6331650781035933,
      "learning_rate": 7.872967832548327e-06,
      "loss": 0.1789,
      "step": 343
    },
    {
      "epoch": 0.6120996441281139,
      "grad_norm": 4.74002537233505,
      "learning_rate": 7.861518895810597e-06,
      "loss": 0.2852,
      "step": 344
    },
    {
      "epoch": 0.6138790035587188,
      "grad_norm": 4.140885005692224,
      "learning_rate": 7.850047604650188e-06,
      "loss": 0.2057,
      "step": 345
    },
    {
      "epoch": 0.6156583629893239,
      "grad_norm": 4.164928809657661,
      "learning_rate": 7.838554048681783e-06,
      "loss": 0.2074,
      "step": 346
    },
    {
      "epoch": 0.6174377224199288,
      "grad_norm": 4.554332532304381,
      "learning_rate": 7.827038317693988e-06,
      "loss": 0.2824,
      "step": 347
    },
    {
      "epoch": 0.6192170818505338,
      "grad_norm": 3.35142435657989,
      "learning_rate": 7.815500501648654e-06,
      "loss": 0.1469,
      "step": 348
    },
    {
      "epoch": 0.6209964412811388,
      "grad_norm": 4.233309265780434,
      "learning_rate": 7.80394069068015e-06,
      "loss": 0.2418,
      "step": 349
    },
    {
      "epoch": 0.6227758007117438,
      "grad_norm": 3.840318747104304,
      "learning_rate": 7.79235897509468e-06,
      "loss": 0.1707,
      "step": 350
    },
{ |
|
"epoch": 0.6245551601423488, |
|
"grad_norm": 5.152711534122949, |
|
"learning_rate": 7.780755445369563e-06, |
|
"loss": 0.23, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.6263345195729537, |
|
"grad_norm": 5.0618250470471935, |
|
"learning_rate": 7.769130192152538e-06, |
|
"loss": 0.2789, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.6281138790035588, |
|
"grad_norm": 4.230326231048685, |
|
"learning_rate": 7.757483306261042e-06, |
|
"loss": 0.2519, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.6298932384341637, |
|
"grad_norm": 4.586403132323308, |
|
"learning_rate": 7.745814878681516e-06, |
|
"loss": 0.2391, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.6316725978647687, |
|
"grad_norm": 4.061605424362457, |
|
"learning_rate": 7.734125000568684e-06, |
|
"loss": 0.1934, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.6334519572953736, |
|
"grad_norm": 4.1222581856718215, |
|
"learning_rate": 7.722413763244837e-06, |
|
"loss": 0.1859, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.6352313167259787, |
|
"grad_norm": 3.028393093623644, |
|
"learning_rate": 7.710681258199136e-06, |
|
"loss": 0.1815, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.6370106761565836, |
|
"grad_norm": 4.058027998981875, |
|
"learning_rate": 7.69892757708688e-06, |
|
"loss": 0.1856, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.6387900355871886, |
|
"grad_norm": 3.7087162854728577, |
|
"learning_rate": 7.687152811728799e-06, |
|
"loss": 0.1944, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.6405693950177936, |
|
"grad_norm": 4.4235018799473815, |
|
"learning_rate": 7.675357054110337e-06, |
|
"loss": 0.2385, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.6423487544483986, |
|
"grad_norm": 4.103319650804931, |
|
"learning_rate": 7.663540396380931e-06, |
|
"loss": 0.1777, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.6441281138790036, |
|
"grad_norm": 3.470558551652339, |
|
"learning_rate": 7.651702930853287e-06, |
|
"loss": 0.1627, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.6459074733096085, |
|
"grad_norm": 4.364729741913229, |
|
"learning_rate": 7.639844750002668e-06, |
|
"loss": 0.2012, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.6476868327402135, |
|
"grad_norm": 3.6128189211390542, |
|
"learning_rate": 7.627965946466167e-06, |
|
"loss": 0.1784, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.6494661921708185, |
|
"grad_norm": 4.033677351470828, |
|
"learning_rate": 7.616066613041977e-06, |
|
"loss": 0.1955, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.6512455516014235, |
|
"grad_norm": 3.8528810719351503, |
|
"learning_rate": 7.6041468426886785e-06, |
|
"loss": 0.2236, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.6530249110320284, |
|
"grad_norm": 3.733237889005623, |
|
"learning_rate": 7.592206728524507e-06, |
|
"loss": 0.1403, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.6548042704626335, |
|
"grad_norm": 3.999047222112807, |
|
"learning_rate": 7.580246363826621e-06, |
|
"loss": 0.2104, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.6565836298932385, |
|
"grad_norm": 4.959098651849355, |
|
"learning_rate": 7.568265842030381e-06, |
|
"loss": 0.2771, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.6583629893238434, |
|
"grad_norm": 3.7433121911612326, |
|
"learning_rate": 7.556265256728618e-06, |
|
"loss": 0.1807, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.6601423487544484, |
|
"grad_norm": 4.166049204343448, |
|
"learning_rate": 7.544244701670894e-06, |
|
"loss": 0.2341, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.6619217081850534, |
|
"grad_norm": 3.9433031295400167, |
|
"learning_rate": 7.532204270762786e-06, |
|
"loss": 0.1638, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.6637010676156584, |
|
"grad_norm": 4.013127261714967, |
|
"learning_rate": 7.520144058065133e-06, |
|
"loss": 0.1793, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.6654804270462633, |
|
"grad_norm": 3.9937927893942415, |
|
"learning_rate": 7.50806415779332e-06, |
|
"loss": 0.1765, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.6672597864768683, |
|
"grad_norm": 4.742948835352874, |
|
"learning_rate": 7.495964664316525e-06, |
|
"loss": 0.2014, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.6690391459074733, |
|
"grad_norm": 4.2302099313858275, |
|
"learning_rate": 7.4838456721569975e-06, |
|
"loss": 0.2504, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.6708185053380783, |
|
"grad_norm": 4.1489244786896196, |
|
"learning_rate": 7.471707275989304e-06, |
|
"loss": 0.1982, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.6725978647686833, |
|
"grad_norm": 4.332242489417489, |
|
"learning_rate": 7.459549570639602e-06, |
|
"loss": 0.176, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.6743772241992882, |
|
"grad_norm": 4.36468305351184, |
|
"learning_rate": 7.447372651084896e-06, |
|
"loss": 0.188, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.6761565836298933, |
|
"grad_norm": 3.288841989104811, |
|
"learning_rate": 7.435176612452286e-06, |
|
"loss": 0.1352, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.6779359430604982, |
|
"grad_norm": 3.8168771377187283, |
|
"learning_rate": 7.4229615500182396e-06, |
|
"loss": 0.2011, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.6797153024911032, |
|
"grad_norm": 3.836392598267191, |
|
"learning_rate": 7.4107275592078345e-06, |
|
"loss": 0.2121, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.6814946619217082, |
|
"grad_norm": 3.518134307158721, |
|
"learning_rate": 7.398474735594022e-06, |
|
"loss": 0.2109, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.6832740213523132, |
|
"grad_norm": 5.379902523298636, |
|
"learning_rate": 7.386203174896872e-06, |
|
"loss": 0.2396, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.6850533807829181, |
|
"grad_norm": 4.06342177947747, |
|
"learning_rate": 7.373912972982838e-06, |
|
"loss": 0.1863, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.6868327402135231, |
|
"grad_norm": 3.383418777796471, |
|
"learning_rate": 7.361604225863992e-06, |
|
"loss": 0.167, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.6886120996441281, |
|
"grad_norm": 4.471321334903843, |
|
"learning_rate": 7.349277029697287e-06, |
|
"loss": 0.2295, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.6903914590747331, |
|
"grad_norm": 3.851104743800462, |
|
"learning_rate": 7.336931480783801e-06, |
|
"loss": 0.1991, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.6921708185053381, |
|
"grad_norm": 3.9506039879692207, |
|
"learning_rate": 7.3245676755679854e-06, |
|
"loss": 0.1693, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.693950177935943, |
|
"grad_norm": 5.004328469240587, |
|
"learning_rate": 7.312185710636911e-06, |
|
"loss": 0.2522, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.6957295373665481, |
|
"grad_norm": 3.6950236172760946, |
|
"learning_rate": 7.299785682719512e-06, |
|
"loss": 0.1557, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.697508896797153, |
|
"grad_norm": 3.8879425444834346, |
|
"learning_rate": 7.287367688685835e-06, |
|
"loss": 0.1887, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.699288256227758, |
|
"grad_norm": 5.106676807320218, |
|
"learning_rate": 7.274931825546279e-06, |
|
"loss": 0.1992, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.701067615658363, |
|
"grad_norm": 4.080598447205462, |
|
"learning_rate": 7.262478190450834e-06, |
|
"loss": 0.1663, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.702846975088968, |
|
"grad_norm": 4.351318647512446, |
|
"learning_rate": 7.250006880688332e-06, |
|
"loss": 0.2579, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.7046263345195729, |
|
"grad_norm": 4.6947093387412036, |
|
"learning_rate": 7.2375179936856775e-06, |
|
"loss": 0.1948, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.7064056939501779, |
|
"grad_norm": 4.140636409548973, |
|
"learning_rate": 7.22501162700709e-06, |
|
"loss": 0.1846, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.708185053380783, |
|
"grad_norm": 3.731560696614587, |
|
"learning_rate": 7.21248787835334e-06, |
|
"loss": 0.1525, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.7099644128113879, |
|
"grad_norm": 3.8799475775859635, |
|
"learning_rate": 7.199946845560994e-06, |
|
"loss": 0.1849, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.7117437722419929, |
|
"grad_norm": 4.713978299468851, |
|
"learning_rate": 7.1873886266016365e-06, |
|
"loss": 0.2578, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.7117437722419929, |
|
"eval_loss": 0.2340700775384903, |
|
"eval_runtime": 1.5654, |
|
"eval_samples_per_second": 29.386, |
|
"eval_steps_per_second": 7.666, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.7135231316725978, |
|
"grad_norm": 3.6496685254189867, |
|
"learning_rate": 7.174813319581115e-06, |
|
"loss": 0.2097, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.7153024911032029, |
|
"grad_norm": 5.288556189119427, |
|
"learning_rate": 7.162221022738768e-06, |
|
"loss": 0.2215, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.7170818505338078, |
|
"grad_norm": 3.9917934976051765, |
|
"learning_rate": 7.149611834446664e-06, |
|
"loss": 0.2008, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.7188612099644128, |
|
"grad_norm": 5.024515362108062, |
|
"learning_rate": 7.136985853208824e-06, |
|
"loss": 0.2528, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.7206405693950177, |
|
"grad_norm": 4.995206628268624, |
|
"learning_rate": 7.124343177660462e-06, |
|
"loss": 0.1984, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.7224199288256228, |
|
"grad_norm": 4.945850645061519, |
|
"learning_rate": 7.111683906567206e-06, |
|
"loss": 0.216, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.7241992882562278, |
|
"grad_norm": 4.9111963446312545, |
|
"learning_rate": 7.099008138824329e-06, |
|
"loss": 0.2253, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.7259786476868327, |
|
"grad_norm": 3.951343555006625, |
|
"learning_rate": 7.086315973455982e-06, |
|
"loss": 0.2077, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.7277580071174378, |
|
"grad_norm": 3.4012995032614715, |
|
"learning_rate": 7.0736075096144084e-06, |
|
"loss": 0.1586, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.7295373665480427, |
|
"grad_norm": 3.5670973873719385, |
|
"learning_rate": 7.060882846579182e-06, |
|
"loss": 0.171, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.7313167259786477, |
|
"grad_norm": 3.686464320358478, |
|
"learning_rate": 7.048142083756427e-06, |
|
"loss": 0.1858, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.7330960854092526, |
|
"grad_norm": 4.017554681669126, |
|
"learning_rate": 7.035385320678035e-06, |
|
"loss": 0.1623, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.7348754448398577, |
|
"grad_norm": 4.981499883300648, |
|
"learning_rate": 7.022612657000898e-06, |
|
"loss": 0.35, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.7366548042704626, |
|
"grad_norm": 3.7993275801386437, |
|
"learning_rate": 7.0098241925061215e-06, |
|
"loss": 0.1882, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.7384341637010676, |
|
"grad_norm": 4.566185325953405, |
|
"learning_rate": 6.997020027098249e-06, |
|
"loss": 0.1816, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.7402135231316725, |
|
"grad_norm": 4.506376816655555, |
|
"learning_rate": 6.9842002608044844e-06, |
|
"loss": 0.2133, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.7419928825622776, |
|
"grad_norm": 3.9508225107534622, |
|
"learning_rate": 6.971364993773901e-06, |
|
"loss": 0.1882, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.7437722419928826, |
|
"grad_norm": 3.6711278466391426, |
|
"learning_rate": 6.958514326276669e-06, |
|
"loss": 0.1839, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.7455516014234875, |
|
"grad_norm": 3.9368569025101503, |
|
"learning_rate": 6.945648358703269e-06, |
|
"loss": 0.2023, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.7473309608540926, |
|
"grad_norm": 3.3271128259281717, |
|
"learning_rate": 6.932767191563703e-06, |
|
"loss": 0.214, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.7491103202846975, |
|
"grad_norm": 3.6457121136012667, |
|
"learning_rate": 6.919870925486718e-06, |
|
"loss": 0.157, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.7508896797153025, |
|
"grad_norm": 3.5045906708066936, |
|
"learning_rate": 6.906959661219011e-06, |
|
"loss": 0.1929, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.7526690391459074, |
|
"grad_norm": 3.6117649722075176, |
|
"learning_rate": 6.8940334996244505e-06, |
|
"loss": 0.1687, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.7544483985765125, |
|
"grad_norm": 3.6332976094124727, |
|
"learning_rate": 6.881092541683279e-06, |
|
"loss": 0.1559, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.7562277580071174, |
|
"grad_norm": 3.6772967507015064, |
|
"learning_rate": 6.8681368884913345e-06, |
|
"loss": 0.1484, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.7580071174377224, |
|
"grad_norm": 3.6369199809423973, |
|
"learning_rate": 6.855166641259252e-06, |
|
"loss": 0.2512, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.7597864768683275, |
|
"grad_norm": 3.241952316665325, |
|
"learning_rate": 6.8421819013116766e-06, |
|
"loss": 0.184, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.7615658362989324, |
|
"grad_norm": 4.3873942878269405, |
|
"learning_rate": 6.829182770086474e-06, |
|
"loss": 0.2732, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.7633451957295374, |
|
"grad_norm": 3.7819721755233653, |
|
"learning_rate": 6.816169349133934e-06, |
|
"loss": 0.178, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.7651245551601423, |
|
"grad_norm": 4.022516512828127, |
|
"learning_rate": 6.803141740115979e-06, |
|
"loss": 0.1589, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.7669039145907474, |
|
"grad_norm": 5.973943620799343, |
|
"learning_rate": 6.7901000448053676e-06, |
|
"loss": 0.2736, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.7686832740213523, |
|
"grad_norm": 3.814805117431919, |
|
"learning_rate": 6.777044365084907e-06, |
|
"loss": 0.1735, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.7704626334519573, |
|
"grad_norm": 3.7038062837436465, |
|
"learning_rate": 6.763974802946649e-06, |
|
"loss": 0.1557, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.7722419928825622, |
|
"grad_norm": 6.330212892571082, |
|
"learning_rate": 6.750891460491093e-06, |
|
"loss": 0.2607, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.7740213523131673, |
|
"grad_norm": 4.545891435234668, |
|
"learning_rate": 6.737794439926395e-06, |
|
"loss": 0.2409, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.7758007117437722, |
|
"grad_norm": 3.421436423226525, |
|
"learning_rate": 6.724683843567567e-06, |
|
"loss": 0.159, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.7775800711743772, |
|
"grad_norm": 4.378590951071992, |
|
"learning_rate": 6.711559773835672e-06, |
|
"loss": 0.1933, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.7793594306049823, |
|
"grad_norm": 3.766475545347043, |
|
"learning_rate": 6.69842233325703e-06, |
|
"loss": 0.1961, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.7811387900355872, |
|
"grad_norm": 4.752339560522412, |
|
"learning_rate": 6.685271624462416e-06, |
|
"loss": 0.2865, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.7829181494661922, |
|
"grad_norm": 3.8106736048589513, |
|
"learning_rate": 6.672107750186255e-06, |
|
"loss": 0.1675, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.7846975088967971, |
|
"grad_norm": 3.0046563453225184, |
|
"learning_rate": 6.658930813265825e-06, |
|
"loss": 0.1456, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.7864768683274022, |
|
"grad_norm": 3.61460394569828, |
|
"learning_rate": 6.645740916640449e-06, |
|
"loss": 0.1725, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.7882562277580071, |
|
"grad_norm": 3.4723587643625944, |
|
"learning_rate": 6.63253816335069e-06, |
|
"loss": 0.1489, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.7900355871886121, |
|
"grad_norm": 3.7796814790135653, |
|
"learning_rate": 6.619322656537552e-06, |
|
"loss": 0.1754, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.791814946619217, |
|
"grad_norm": 3.651504128862347, |
|
"learning_rate": 6.606094499441671e-06, |
|
"loss": 0.1931, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.7935943060498221, |
|
"grad_norm": 4.350687161041938, |
|
"learning_rate": 6.592853795402502e-06, |
|
"loss": 0.2346, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.7953736654804271, |
|
"grad_norm": 3.866200479414623, |
|
"learning_rate": 6.579600647857525e-06, |
|
"loss": 0.1607, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.797153024911032, |
|
"grad_norm": 5.01890885513813, |
|
"learning_rate": 6.566335160341425e-06, |
|
"loss": 0.2207, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.798932384341637, |
|
"grad_norm": 3.1974367495653357, |
|
"learning_rate": 6.553057436485289e-06, |
|
"loss": 0.1711, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.800711743772242, |
|
"grad_norm": 3.0582960031855397, |
|
"learning_rate": 6.539767580015799e-06, |
|
"loss": 0.1993, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.802491103202847, |
|
"grad_norm": 4.819643555652815, |
|
"learning_rate": 6.52646569475441e-06, |
|
"loss": 0.1661, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.8042704626334519, |
|
"grad_norm": 4.65088488978281, |
|
"learning_rate": 6.513151884616556e-06, |
|
"loss": 0.2663, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.806049822064057, |
|
"grad_norm": 4.96303005188114, |
|
"learning_rate": 6.499826253610823e-06, |
|
"loss": 0.2116, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.8078291814946619, |
|
"grad_norm": 4.075945262502351, |
|
"learning_rate": 6.486488905838143e-06, |
|
"loss": 0.1576, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.8096085409252669, |
|
"grad_norm": 3.769243754610397, |
|
"learning_rate": 6.473139945490984e-06, |
|
"loss": 0.157, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.8113879003558719, |
|
"grad_norm": 4.184219301093068, |
|
"learning_rate": 6.459779476852528e-06, |
|
"loss": 0.1832, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.8131672597864769, |
|
"grad_norm": 3.5258435361998326, |
|
"learning_rate": 6.446407604295863e-06, |
|
"loss": 0.1835, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.8149466192170819, |
|
"grad_norm": 3.5157230217368705, |
|
"learning_rate": 6.433024432283169e-06, |
|
"loss": 0.1754, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.8167259786476868, |
|
"grad_norm": 4.504248486459894, |
|
"learning_rate": 6.41963006536489e-06, |
|
"loss": 0.218, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.8185053380782918, |
|
"grad_norm": 3.7027478194830614, |
|
"learning_rate": 6.4062246081789316e-06, |
|
"loss": 0.1823, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.8202846975088968, |
|
"grad_norm": 3.340084191421797, |
|
"learning_rate": 6.392808165449836e-06, |
|
"loss": 0.1425, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.8220640569395018, |
|
"grad_norm": 3.450980506799201, |
|
"learning_rate": 6.379380841987965e-06, |
|
"loss": 0.2061, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.8238434163701067, |
|
"grad_norm": 3.897286916273504, |
|
"learning_rate": 6.365942742688684e-06, |
|
"loss": 0.1853, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.8256227758007118, |
|
"grad_norm": 4.18369657459111, |
|
"learning_rate": 6.352493972531535e-06, |
|
"loss": 0.2307, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.8274021352313167, |
|
"grad_norm": 4.672191933418133, |
|
"learning_rate": 6.339034636579425e-06, |
|
"loss": 0.227, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.8291814946619217, |
|
"grad_norm": 3.450194318798735, |
|
"learning_rate": 6.325564839977802e-06, |
|
"loss": 0.1511, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.8309608540925267, |
|
"grad_norm": 3.492012703279857, |
|
"learning_rate": 6.312084687953835e-06, |
|
"loss": 0.184, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.8327402135231317, |
|
"grad_norm": 4.849147571765675, |
|
"learning_rate": 6.298594285815585e-06, |
|
"loss": 0.2256, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.8345195729537367, |
|
"grad_norm": 3.9407179877473917, |
|
"learning_rate": 6.2850937389511936e-06, |
|
"loss": 0.2376, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.8362989323843416, |
|
"grad_norm": 3.7664412947901607, |
|
"learning_rate": 6.271583152828049e-06, |
|
"loss": 0.1589, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.8380782918149466, |
|
"grad_norm": 3.965817253746303, |
|
"learning_rate": 6.258062632991972e-06, |
|
"loss": 0.1427, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.8398576512455516, |
|
"grad_norm": 3.028889343395467, |
|
"learning_rate": 6.244532285066382e-06, |
|
"loss": 0.1363, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.8416370106761566, |
|
"grad_norm": 3.5748346929325137, |
|
"learning_rate": 6.2309922147514775e-06, |
|
"loss": 0.1991, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.8434163701067615, |
|
"grad_norm": 4.245901230048077, |
|
"learning_rate": 6.2174425278234115e-06, |
|
"loss": 0.2349, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.8451957295373665, |
|
"grad_norm": 4.789682966236731, |
|
"learning_rate": 6.20388333013346e-06, |
|
"loss": 0.1713, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.8469750889679716, |
|
"grad_norm": 3.928085695655477, |
|
"learning_rate": 6.190314727607196e-06, |
|
"loss": 0.2071, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.8487544483985765, |
|
"grad_norm": 3.6285598651374618, |
|
"learning_rate": 6.176736826243671e-06, |
|
"loss": 0.1704, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.8505338078291815, |
|
"grad_norm": 4.563874914050937, |
|
"learning_rate": 6.163149732114571e-06, |
|
"loss": 0.2371, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.8523131672597865, |
|
"grad_norm": 4.703796673716383, |
|
"learning_rate": 6.149553551363404e-06, |
|
"loss": 0.2586, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.8540925266903915, |
|
"grad_norm": 3.9084499001640345, |
|
"learning_rate": 6.1359483902046605e-06, |
|
"loss": 0.1849, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.8558718861209964, |
|
"grad_norm": 3.6848393410794094, |
|
"learning_rate": 6.122334354922984e-06, |
|
"loss": 0.1895, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.8576512455516014, |
|
"grad_norm": 3.8088090680289683, |
|
"learning_rate": 6.108711551872347e-06, |
|
"loss": 0.2111, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.8594306049822064, |
|
"grad_norm": 5.967596044601333, |
|
"learning_rate": 6.095080087475218e-06, |
|
"loss": 0.2373, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.8612099644128114, |
|
"grad_norm": 4.017572776284846, |
|
"learning_rate": 6.0814400682217236e-06, |
|
"loss": 0.2086, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.8629893238434164, |
|
"grad_norm": 3.596728238351977, |
|
"learning_rate": 6.067791600668823e-06, |
|
"loss": 0.1363, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.8647686832740213, |
|
"grad_norm": 4.582135519199611, |
|
"learning_rate": 6.054134791439479e-06, |
|
"loss": 0.1716, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.8665480427046264, |
|
"grad_norm": 4.299362907106527, |
|
"learning_rate": 6.040469747221815e-06, |
|
"loss": 0.173, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.8683274021352313, |
|
"grad_norm": 3.976188825048674, |
|
"learning_rate": 6.026796574768288e-06, |
|
"loss": 0.1578, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.8701067615658363, |
|
"grad_norm": 3.821450021408301, |
|
"learning_rate": 6.013115380894854e-06, |
|
"loss": 0.1661, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.8718861209964412, |
|
"grad_norm": 3.3944212334816313, |
|
"learning_rate": 5.999426272480133e-06, |
|
"loss": 0.1548, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.8736654804270463, |
|
"grad_norm": 3.8031511108691007, |
|
"learning_rate": 5.985729356464575e-06, |
|
"loss": 0.2172, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.8754448398576512, |
|
"grad_norm": 3.0821444996717666, |
|
"learning_rate": 5.972024739849622e-06, |
|
"loss": 0.1727, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.8772241992882562, |
|
"grad_norm": 3.2573579758744087, |
|
"learning_rate": 5.958312529696874e-06, |
|
"loss": 0.1581, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.8790035587188612, |
|
"grad_norm": 3.9430043586960455, |
|
"learning_rate": 5.944592833127253e-06, |
|
"loss": 0.2317, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.8807829181494662, |
|
"grad_norm": 3.5817698139529934, |
|
"learning_rate": 5.9308657573201645e-06, |
|
"loss": 0.1967, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.8825622775800712, |
|
"grad_norm": 3.939496922647276, |
|
"learning_rate": 5.917131409512663e-06, |
|
"loss": 0.1828, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.8843416370106761, |
|
"grad_norm": 3.629068610935976, |
|
"learning_rate": 5.903389896998611e-06, |
|
"loss": 0.1639, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.8861209964412812, |
|
"grad_norm": 4.341749456609039, |
|
"learning_rate": 5.889641327127843e-06, |
|
"loss": 0.2159, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.8879003558718861, |
|
"grad_norm": 3.8747758549062645, |
|
"learning_rate": 5.875885807305326e-06, |
|
"loss": 0.1876, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.8896797153024911, |
|
"grad_norm": 3.564522736734071, |
|
"learning_rate": 5.862123444990319e-06, |
|
"loss": 0.1837, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.891459074733096, |
|
"grad_norm": 4.174444406411341, |
|
"learning_rate": 5.848354347695537e-06, |
|
"loss": 0.3298, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.8932384341637011, |
|
"grad_norm": 4.426841525563123, |
|
"learning_rate": 5.83457862298631e-06, |
|
"loss": 0.2582, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.895017793594306, |
|
"grad_norm": 4.435783380290895, |
|
"learning_rate": 5.8207963784797396e-06, |
|
"loss": 0.162, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.896797153024911, |
|
"grad_norm": 3.964608455615552, |
|
"learning_rate": 5.807007721843862e-06, |
|
"loss": 0.2296, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.8985765124555161, |
|
"grad_norm": 5.15622680492661, |
|
"learning_rate": 5.793212760796804e-06, |
|
"loss": 0.269, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.900355871886121, |
|
"grad_norm": 3.99440044862334, |
|
"learning_rate": 5.779411603105947e-06, |
|
"loss": 0.2008, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.902135231316726, |
|
"grad_norm": 3.712933692465381, |
|
"learning_rate": 5.765604356587076e-06, |
|
"loss": 0.1635, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.9039145907473309, |
|
"grad_norm": 3.985724780857063, |
|
"learning_rate": 5.751791129103545e-06, |
|
"loss": 0.1877, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.905693950177936, |
|
"grad_norm": 3.8492903866750474, |
|
"learning_rate": 5.737972028565431e-06, |
|
"loss": 0.1875, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.9074733096085409, |
|
"grad_norm": 4.104607656744395, |
|
"learning_rate": 5.7241471629286934e-06, |
|
"loss": 0.1967, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.9092526690391459, |
|
"grad_norm": 4.326137252164571, |
|
"learning_rate": 5.7103166401943276e-06, |
|
"loss": 0.2051, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.9110320284697508, |
|
"grad_norm": 4.444509312766475, |
|
"learning_rate": 5.696480568407523e-06, |
|
"loss": 0.1766, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.9128113879003559, |
|
"grad_norm": 3.967771805282229, |
|
"learning_rate": 5.682639055656817e-06, |
|
"loss": 0.2663, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.9145907473309609, |
|
"grad_norm": 4.419525843396819, |
|
"learning_rate": 5.668792210073255e-06, |
|
"loss": 0.1985, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.9163701067615658, |
|
"grad_norm": 3.9201395538983994, |
|
"learning_rate": 5.654940139829544e-06, |
|
"loss": 0.1785, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.9181494661921709, |
|
"grad_norm": 4.0031126745227965, |
|
"learning_rate": 5.641082953139201e-06, |
|
"loss": 0.1624, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.9199288256227758, |
|
"grad_norm": 3.9325720439807124, |
|
"learning_rate": 5.6272207582557195e-06, |
|
"loss": 0.2, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.9217081850533808, |
|
"grad_norm": 4.09915288572735, |
|
"learning_rate": 5.61335366347171e-06, |
|
"loss": 0.1709, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.9234875444839857, |
|
"grad_norm": 4.684286007560391, |
|
"learning_rate": 5.599481777118071e-06, |
|
"loss": 0.2038, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.9252669039145908, |
|
"grad_norm": 4.465530597198707, |
|
"learning_rate": 5.585605207563124e-06, |
|
"loss": 0.1576, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.9270462633451957, |
|
"grad_norm": 3.746540098915994, |
|
"learning_rate": 5.571724063211782e-06, |
|
"loss": 0.1782, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.9288256227758007, |
|
"grad_norm": 3.049757596570069, |
|
"learning_rate": 5.557838452504692e-06, |
|
"loss": 0.1168, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.9306049822064056, |
|
"grad_norm": 4.9621953921462465, |
|
"learning_rate": 5.5439484839173996e-06, |
|
"loss": 0.2182, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.9323843416370107, |
|
"grad_norm": 3.6303955245624793, |
|
"learning_rate": 5.530054265959486e-06, |
|
"loss": 0.1737, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.9341637010676157, |
|
"grad_norm": 4.2294079901381885, |
|
"learning_rate": 5.516155907173735e-06, |
|
"loss": 0.2238, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.9359430604982206, |
|
"grad_norm": 3.4961117366273617, |
|
"learning_rate": 5.5022535161352764e-06, |
|
"loss": 0.2073, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.9377224199288257, |
|
"grad_norm": 4.8917009931209305, |
|
"learning_rate": 5.488347201450741e-06, |
|
"loss": 0.1776, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.9395017793594306, |
|
"grad_norm": 3.814223945081305, |
|
"learning_rate": 5.47443707175741e-06, |
|
"loss": 0.1805, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.9412811387900356, |
|
"grad_norm": 3.627012763290324, |
|
"learning_rate": 5.46052323572237e-06, |
|
"loss": 0.1185, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.9430604982206405, |
|
"grad_norm": 4.104840444004774, |
|
"learning_rate": 5.446605802041662e-06, |
|
"loss": 0.1906, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.9448398576512456, |
|
"grad_norm": 3.493323034572416, |
|
"learning_rate": 5.432684879439428e-06, |
|
"loss": 0.1493, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.9466192170818505, |
|
"grad_norm": 3.7879944613748737, |
|
"learning_rate": 5.418760576667071e-06, |
|
"loss": 0.127, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.9483985765124555, |
|
"grad_norm": 4.161143776337595, |
|
"learning_rate": 5.404833002502398e-06, |
|
"loss": 0.2198, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.9501779359430605, |
|
"grad_norm": 5.642088111923067, |
|
"learning_rate": 5.39090226574877e-06, |
|
"loss": 0.2459, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.9519572953736655, |
|
"grad_norm": 4.499024766017599, |
|
"learning_rate": 5.376968475234258e-06, |
|
"loss": 0.2055, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.9537366548042705, |
|
"grad_norm": 3.295995243380386, |
|
"learning_rate": 5.363031739810787e-06, |
|
"loss": 0.1391, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.9555160142348754, |
|
"grad_norm": 3.665476066518449, |
|
"learning_rate": 5.349092168353291e-06, |
|
"loss": 0.1661, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.9572953736654805, |
|
"grad_norm": 3.9631175894124175, |
|
"learning_rate": 5.335149869758855e-06, |
|
"loss": 0.178, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.9590747330960854, |
|
"grad_norm": 3.5415647072506844, |
|
"learning_rate": 5.32120495294587e-06, |
|
"loss": 0.187, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.9608540925266904, |
|
"grad_norm": 3.31449893002666, |
|
"learning_rate": 5.3072575268531835e-06, |
|
"loss": 0.179, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.9626334519572953, |
|
"grad_norm": 4.32823684237884, |
|
"learning_rate": 5.293307700439242e-06, |
|
"loss": 0.2262, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.9644128113879004, |
|
"grad_norm": 4.93998070999509, |
|
"learning_rate": 5.2793555826812456e-06, |
|
"loss": 0.1963, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.9661921708185054, |
|
"grad_norm": 4.08306884913846, |
|
"learning_rate": 5.265401282574294e-06, |
|
"loss": 0.1587, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.9679715302491103, |
|
"grad_norm": 4.0836293577079275, |
|
"learning_rate": 5.2514449091305375e-06, |
|
"loss": 0.1673, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.9697508896797153, |
|
"grad_norm": 4.025652129769559, |
|
"learning_rate": 5.237486571378317e-06, |
|
"loss": 0.1779, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.9715302491103203, |
|
"grad_norm": 3.8769462652013362, |
|
"learning_rate": 5.22352637836133e-06, |
|
"loss": 0.1613, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.9733096085409253, |
|
"grad_norm": 3.3008221956749835, |
|
"learning_rate": 5.209564439137755e-06, |
|
"loss": 0.1229, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.9750889679715302, |
|
"grad_norm": 4.779142137038802, |
|
"learning_rate": 5.195600862779421e-06, |
|
"loss": 0.2438, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.9768683274021353, |
|
"grad_norm": 3.947323165726393, |
|
"learning_rate": 5.181635758370942e-06, |
|
"loss": 0.2021, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.9786476868327402, |
|
"grad_norm": 4.841592450454382, |
|
"learning_rate": 5.167669235008871e-06, |
|
"loss": 0.2274, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.9804270462633452, |
|
"grad_norm": 4.857867078626577, |
|
"learning_rate": 5.153701401800845e-06, |
|
"loss": 0.2359, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.9822064056939501, |
|
"grad_norm": 4.101558495297293, |
|
"learning_rate": 5.139732367864736e-06, |
|
"loss": 0.2214, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.9839857651245552, |
|
"grad_norm": 3.638094261102884, |
|
"learning_rate": 5.1257622423277934e-06, |
|
"loss": 0.1287, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.9857651245551602, |
|
"grad_norm": 4.062911931577164, |
|
"learning_rate": 5.111791134325793e-06, |
|
"loss": 0.2079, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.9875444839857651, |
|
"grad_norm": 4.159267140826344, |
|
"learning_rate": 5.097819153002192e-06, |
|
"loss": 0.212, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.9893238434163701, |
|
"grad_norm": 3.423865413678623, |
|
"learning_rate": 5.083846407507263e-06, |
|
"loss": 0.1253, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.9911032028469751, |
|
"grad_norm": 4.261727702503341, |
|
"learning_rate": 5.0698730069972535e-06, |
|
"loss": 0.2019, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.9928825622775801, |
|
"grad_norm": 2.986514833860016, |
|
"learning_rate": 5.055899060633524e-06, |
|
"loss": 0.1351, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.994661921708185, |
|
"grad_norm": 3.6506561084702502, |
|
"learning_rate": 5.041924677581702e-06, |
|
"loss": 0.2003, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.99644128113879, |
|
"grad_norm": 3.871475607640716, |
|
"learning_rate": 5.0279499670108245e-06, |
|
"loss": 0.1837, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.998220640569395, |
|
"grad_norm": 3.7826231960004044, |
|
"learning_rate": 5.013975038092491e-06, |
|
"loss": 0.1525, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 3.2199040971389916, |
|
"learning_rate": 5e-06, |
|
"loss": 0.1127, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.001779359430605, |
|
"grad_norm": 3.2423619154930763, |
|
"learning_rate": 4.98602496190751e-06, |
|
"loss": 0.1214, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.00355871886121, |
|
"grad_norm": 2.4378750066443295, |
|
"learning_rate": 4.9720500329891755e-06, |
|
"loss": 0.0947, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.0053380782918149, |
|
"grad_norm": 3.2923620733221455, |
|
"learning_rate": 4.9580753224183005e-06, |
|
"loss": 0.1281, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.00711743772242, |
|
"grad_norm": 2.8065763224049083, |
|
"learning_rate": 4.944100939366478e-06, |
|
"loss": 0.0906, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.008896797153025, |
|
"grad_norm": 3.254360971936307, |
|
"learning_rate": 4.930126993002748e-06, |
|
"loss": 0.1124, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.01067615658363, |
|
"grad_norm": 2.3517447959696365, |
|
"learning_rate": 4.9161535924927375e-06, |
|
"loss": 0.0781, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.0124555160142348, |
|
"grad_norm": 2.5346060875216656, |
|
"learning_rate": 4.90218084699781e-06, |
|
"loss": 0.0768, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.0142348754448398, |
|
"grad_norm": 3.575243449601108, |
|
"learning_rate": 4.888208865674208e-06, |
|
"loss": 0.1091, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.0160142348754448, |
|
"grad_norm": 3.368218340821098, |
|
"learning_rate": 4.874237757672209e-06, |
|
"loss": 0.1074, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.0177935943060499, |
|
"grad_norm": 3.2681082074950605, |
|
"learning_rate": 4.8602676321352646e-06, |
|
"loss": 0.1094, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.019572953736655, |
|
"grad_norm": 3.2548895434638556, |
|
"learning_rate": 4.846298598199155e-06, |
|
"loss": 0.0844, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.0213523131672597, |
|
"grad_norm": 3.38580186332175, |
|
"learning_rate": 4.832330764991131e-06, |
|
"loss": 0.0896, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.0231316725978647, |
|
"grad_norm": 3.211544649175006, |
|
"learning_rate": 4.81836424162906e-06, |
|
"loss": 0.1118, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.0249110320284698, |
|
"grad_norm": 3.662224663538181, |
|
"learning_rate": 4.80439913722058e-06, |
|
"loss": 0.1051, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.0266903914590748, |
|
"grad_norm": 3.759015012374257, |
|
"learning_rate": 4.790435560862247e-06, |
|
"loss": 0.1496, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.0284697508896796, |
|
"grad_norm": 4.577866370446059, |
|
"learning_rate": 4.776473621638673e-06, |
|
"loss": 0.0977, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.0302491103202847, |
|
"grad_norm": 3.828109545031818, |
|
"learning_rate": 4.762513428621684e-06, |
|
"loss": 0.0871, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.0320284697508897, |
|
"grad_norm": 4.703256082172063, |
|
"learning_rate": 4.748555090869464e-06, |
|
"loss": 0.0929, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.0338078291814947, |
|
"grad_norm": 3.5581872144242563, |
|
"learning_rate": 4.734598717425706e-06, |
|
"loss": 0.091, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.0355871886120998, |
|
"grad_norm": 5.24727862037066, |
|
"learning_rate": 4.720644417318755e-06, |
|
"loss": 0.1225, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.0373665480427046, |
|
"grad_norm": 4.556442457822358, |
|
"learning_rate": 4.70669229956076e-06, |
|
"loss": 0.0789, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.0391459074733096, |
|
"grad_norm": 4.233428870632521, |
|
"learning_rate": 4.692742473146818e-06, |
|
"loss": 0.1067, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.0409252669039146, |
|
"grad_norm": 3.829864181706009, |
|
"learning_rate": 4.678795047054131e-06, |
|
"loss": 0.1275, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.0427046263345197, |
|
"grad_norm": 4.354659510164847, |
|
"learning_rate": 4.664850130241146e-06, |
|
"loss": 0.0772, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.0444839857651245, |
|
"grad_norm": 3.150028952780218, |
|
"learning_rate": 4.650907831646711e-06, |
|
"loss": 0.0687, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.0462633451957295, |
|
"grad_norm": 3.378680021945315, |
|
"learning_rate": 4.636968260189214e-06, |
|
"loss": 0.1242, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.0480427046263345, |
|
"grad_norm": 4.061865457068288, |
|
"learning_rate": 4.623031524765744e-06, |
|
"loss": 0.0961, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.0498220640569396, |
|
"grad_norm": 3.650064375865568, |
|
"learning_rate": 4.609097734251231e-06, |
|
"loss": 0.0798, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.0516014234875444, |
|
"grad_norm": 4.573883314884186, |
|
"learning_rate": 4.595166997497605e-06, |
|
"loss": 0.0862, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.0533807829181494, |
|
"grad_norm": 3.5182403920295875, |
|
"learning_rate": 4.58123942333293e-06, |
|
"loss": 0.0889, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.0551601423487544, |
|
"grad_norm": 3.3887932917797317, |
|
"learning_rate": 4.567315120560573e-06, |
|
"loss": 0.0896, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.0569395017793595, |
|
"grad_norm": 2.960384477470443, |
|
"learning_rate": 4.553394197958339e-06, |
|
"loss": 0.07, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.0587188612099645, |
|
"grad_norm": 4.02120462079333, |
|
"learning_rate": 4.539476764277631e-06, |
|
"loss": 0.0923, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.0604982206405693, |
|
"grad_norm": 3.1487123682951648, |
|
"learning_rate": 4.525562928242592e-06, |
|
"loss": 0.0844, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.0622775800711743, |
|
"grad_norm": 4.109455241572257, |
|
"learning_rate": 4.511652798549261e-06, |
|
"loss": 0.1052, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.0640569395017794, |
|
"grad_norm": 3.3119269447488704, |
|
"learning_rate": 4.497746483864725e-06, |
|
"loss": 0.0734, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.0658362989323844, |
|
"grad_norm": 3.393803278133145, |
|
"learning_rate": 4.483844092826267e-06, |
|
"loss": 0.0732, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.0676156583629894, |
|
"grad_norm": 2.951451660336476, |
|
"learning_rate": 4.469945734040516e-06, |
|
"loss": 0.0644, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.0676156583629894, |
|
"eval_loss": 0.2324187010526657, |
|
"eval_runtime": 1.5698, |
|
"eval_samples_per_second": 29.304, |
|
"eval_steps_per_second": 7.644, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.0693950177935942, |
|
"grad_norm": 3.477132951500829, |
|
"learning_rate": 4.456051516082603e-06, |
|
"loss": 0.0941, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.0711743772241993, |
|
"grad_norm": 4.640038198155476, |
|
"learning_rate": 4.442161547495309e-06, |
|
"loss": 0.0938, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.0729537366548043, |
|
"grad_norm": 3.398559845802383, |
|
"learning_rate": 4.42827593678822e-06, |
|
"loss": 0.0693, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.0747330960854093, |
|
"grad_norm": 3.738110831321602, |
|
"learning_rate": 4.414394792436877e-06, |
|
"loss": 0.0821, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.0765124555160142, |
|
"grad_norm": 3.730030898739278, |
|
"learning_rate": 4.400518222881931e-06, |
|
"loss": 0.106, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.0782918149466192, |
|
"grad_norm": 3.8248051740980307, |
|
"learning_rate": 4.386646336528291e-06, |
|
"loss": 0.1241, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.0800711743772242, |
|
"grad_norm": 4.032099772712417, |
|
"learning_rate": 4.372779241744282e-06, |
|
"loss": 0.0749, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.0818505338078293, |
|
"grad_norm": 2.820672631217004, |
|
"learning_rate": 4.358917046860799e-06, |
|
"loss": 0.0703, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.083629893238434, |
|
"grad_norm": 3.927064366607658, |
|
"learning_rate": 4.345059860170458e-06, |
|
"loss": 0.1175, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.085409252669039, |
|
"grad_norm": 3.754826646717613, |
|
"learning_rate": 4.331207789926746e-06, |
|
"loss": 0.0802, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.0871886120996441, |
|
"grad_norm": 2.7341443213413865, |
|
"learning_rate": 4.317360944343184e-06, |
|
"loss": 0.0678, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.0889679715302492, |
|
"grad_norm": 2.878256222554803, |
|
"learning_rate": 4.303519431592479e-06, |
|
"loss": 0.0726, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.0907473309608542, |
|
"grad_norm": 4.246462278927262, |
|
"learning_rate": 4.289683359805673e-06, |
|
"loss": 0.1144, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.092526690391459, |
|
"grad_norm": 3.2216680291873083, |
|
"learning_rate": 4.275852837071309e-06, |
|
"loss": 0.0839, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.094306049822064, |
|
"grad_norm": 3.688166456713495, |
|
"learning_rate": 4.26202797143457e-06, |
|
"loss": 0.1006, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.096085409252669, |
|
"grad_norm": 4.080704786168854, |
|
"learning_rate": 4.248208870896456e-06, |
|
"loss": 0.1206, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.097864768683274, |
|
"grad_norm": 3.5121213561317046, |
|
"learning_rate": 4.234395643412925e-06, |
|
"loss": 0.0831, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.099644128113879, |
|
"grad_norm": 3.820860496051861, |
|
"learning_rate": 4.220588396894055e-06, |
|
"loss": 0.0739, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.101423487544484, |
|
"grad_norm": 3.5721251541134778, |
|
"learning_rate": 4.2067872392031965e-06, |
|
"loss": 0.0921, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.103202846975089, |
|
"grad_norm": 4.439059784001531, |
|
"learning_rate": 4.192992278156141e-06, |
|
"loss": 0.1122, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.104982206405694, |
|
"grad_norm": 3.696650029415793, |
|
"learning_rate": 4.179203621520262e-06, |
|
"loss": 0.1188, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.106761565836299, |
|
"grad_norm": 3.5087185398969964, |
|
"learning_rate": 4.165421377013691e-06, |
|
"loss": 0.074, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.1085409252669038, |
|
"grad_norm": 4.288968810833285, |
|
"learning_rate": 4.151645652304465e-06, |
|
"loss": 0.0782, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.1103202846975089, |
|
"grad_norm": 3.623701438561222, |
|
"learning_rate": 4.137876555009684e-06, |
|
"loss": 0.0966, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.112099644128114, |
|
"grad_norm": 3.3256536821990688, |
|
"learning_rate": 4.124114192694676e-06, |
|
"loss": 0.0792, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.113879003558719, |
|
"grad_norm": 3.8973244663256494, |
|
"learning_rate": 4.110358672872158e-06, |
|
"loss": 0.0984, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.1156583629893237, |
|
"grad_norm": 3.7590455311228674, |
|
"learning_rate": 4.0966101030013915e-06, |
|
"loss": 0.0928, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.1174377224199288, |
|
"grad_norm": 3.588282480981207, |
|
"learning_rate": 4.082868590487339e-06, |
|
"loss": 0.087, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.1192170818505338, |
|
"grad_norm": 2.6628110358136636, |
|
"learning_rate": 4.069134242679837e-06, |
|
"loss": 0.0698, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.1209964412811388, |
|
"grad_norm": 3.7301333427837986, |
|
"learning_rate": 4.055407166872748e-06, |
|
"loss": 0.0842, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.1227758007117439, |
|
"grad_norm": 3.600556749255139, |
|
"learning_rate": 4.041687470303127e-06, |
|
"loss": 0.1269, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.1245551601423487, |
|
"grad_norm": 3.1199071718036726, |
|
"learning_rate": 4.02797526015038e-06, |
|
"loss": 0.1055, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.1263345195729537, |
|
"grad_norm": 4.061652810473033, |
|
"learning_rate": 4.014270643535427e-06, |
|
"loss": 0.0996, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.1281138790035588, |
|
"grad_norm": 3.5697216701065333, |
|
"learning_rate": 4.000573727519868e-06, |
|
"loss": 0.0738, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.1298932384341638, |
|
"grad_norm": 3.5718430820551097, |
|
"learning_rate": 3.9868846191051465e-06, |
|
"loss": 0.0889, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.1316725978647686, |
|
"grad_norm": 4.722048988749142, |
|
"learning_rate": 3.973203425231715e-06, |
|
"loss": 0.1403, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.1334519572953736, |
|
"grad_norm": 3.22654931499242, |
|
"learning_rate": 3.959530252778187e-06, |
|
"loss": 0.1023, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.1352313167259787, |
|
"grad_norm": 3.9324156054695907, |
|
"learning_rate": 3.945865208560522e-06, |
|
"loss": 0.1055, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.1370106761565837, |
|
"grad_norm": 3.6728782971699223, |
|
"learning_rate": 3.932208399331177e-06, |
|
"loss": 0.1104, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.1387900355871885, |
|
"grad_norm": 3.7602711014863863, |
|
"learning_rate": 3.918559931778277e-06, |
|
"loss": 0.1011, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.1405693950177935, |
|
"grad_norm": 3.157432598778677, |
|
"learning_rate": 3.904919912524784e-06, |
|
"loss": 0.0777, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.1423487544483986, |
|
"grad_norm": 2.9374525643639235, |
|
"learning_rate": 3.891288448127654e-06, |
|
"loss": 0.0766, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.1441281138790036, |
|
"grad_norm": 4.529010042264392, |
|
"learning_rate": 3.877665645077017e-06, |
|
"loss": 0.1243, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.1459074733096086, |
|
"grad_norm": 3.6340529305081293, |
|
"learning_rate": 3.86405160979534e-06, |
|
"loss": 0.1172, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.1476868327402134, |
|
"grad_norm": 3.813112953664629, |
|
"learning_rate": 3.850446448636597e-06, |
|
"loss": 0.0947, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.1494661921708185, |
|
"grad_norm": 3.1581768388763853, |
|
"learning_rate": 3.8368502678854296e-06, |
|
"loss": 0.0761, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.1512455516014235, |
|
"grad_norm": 3.212649699793076, |
|
"learning_rate": 3.8232631737563306e-06, |
|
"loss": 0.0949, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.1530249110320285, |
|
"grad_norm": 4.163633318117342, |
|
"learning_rate": 3.809685272392804e-06, |
|
"loss": 0.0795, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.1548042704626336, |
|
"grad_norm": 3.2211790051439784, |
|
"learning_rate": 3.796116669866543e-06, |
|
"loss": 0.0962, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.1565836298932384, |
|
"grad_norm": 3.2899054269458095, |
|
"learning_rate": 3.78255747217659e-06, |
|
"loss": 0.0724, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.1583629893238434, |
|
"grad_norm": 3.391690760249811, |
|
"learning_rate": 3.769007785248523e-06, |
|
"loss": 0.0966, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.1601423487544484, |
|
"grad_norm": 4.294140698493697, |
|
"learning_rate": 3.7554677149336186e-06, |
|
"loss": 0.113, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.1619217081850535, |
|
"grad_norm": 3.769028277821608, |
|
"learning_rate": 3.7419373670080284e-06, |
|
"loss": 0.0938, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.1637010676156583, |
|
"grad_norm": 3.977932938731009, |
|
"learning_rate": 3.7284168471719527e-06, |
|
"loss": 0.0893, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.1654804270462633, |
|
"grad_norm": 3.193681458977709, |
|
"learning_rate": 3.7149062610488085e-06, |
|
"loss": 0.0894, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.1672597864768683, |
|
"grad_norm": 3.222495438629174, |
|
"learning_rate": 3.701405714184416e-06, |
|
"loss": 0.0823, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.1690391459074734, |
|
"grad_norm": 3.38477213212641, |
|
"learning_rate": 3.687915312046166e-06, |
|
"loss": 0.0962, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.1708185053380782, |
|
"grad_norm": 4.0022405702127175, |
|
"learning_rate": 3.6744351600221994e-06, |
|
"loss": 0.0865, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.1725978647686832, |
|
"grad_norm": 3.3901677039807456, |
|
"learning_rate": 3.6609653634205773e-06, |
|
"loss": 0.123, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.1743772241992882, |
|
"grad_norm": 3.505174508416737, |
|
"learning_rate": 3.647506027468467e-06, |
|
"loss": 0.0769, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.1761565836298933, |
|
"grad_norm": 3.145190987718115, |
|
"learning_rate": 3.6340572573113176e-06, |
|
"loss": 0.0954, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.1779359430604983, |
|
"grad_norm": 2.55496336225693, |
|
"learning_rate": 3.6206191580120346e-06, |
|
"loss": 0.0733, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.1797153024911031, |
|
"grad_norm": 3.5233368769682376, |
|
"learning_rate": 3.6071918345501655e-06, |
|
"loss": 0.0732, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.1814946619217082, |
|
"grad_norm": 3.1818406211122414, |
|
"learning_rate": 3.5937753918210705e-06, |
|
"loss": 0.073, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.1832740213523132, |
|
"grad_norm": 3.348066843030124, |
|
"learning_rate": 3.5803699346351117e-06, |
|
"loss": 0.0822, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.1850533807829182, |
|
"grad_norm": 2.8591843879359993, |
|
"learning_rate": 3.566975567716833e-06, |
|
"loss": 0.0873, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.1868327402135233, |
|
"grad_norm": 2.732160647293849, |
|
"learning_rate": 3.5535923957041374e-06, |
|
"loss": 0.0569, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.188612099644128, |
|
"grad_norm": 2.59427058187742, |
|
"learning_rate": 3.540220523147474e-06, |
|
"loss": 0.0729, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.190391459074733, |
|
"grad_norm": 3.3228146702751347, |
|
"learning_rate": 3.5268600545090183e-06, |
|
"loss": 0.0576, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.1921708185053381, |
|
"grad_norm": 3.844620756789584, |
|
"learning_rate": 3.513511094161858e-06, |
|
"loss": 0.0975, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.193950177935943, |
|
"grad_norm": 4.428974788875177, |
|
"learning_rate": 3.5001737463891793e-06, |
|
"loss": 0.0983, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.195729537366548, |
|
"grad_norm": 4.004915885684005, |
|
"learning_rate": 3.4868481153834454e-06, |
|
"loss": 0.1062, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.197508896797153, |
|
"grad_norm": 3.7553604729303713, |
|
"learning_rate": 3.4735343052455905e-06, |
|
"loss": 0.0843, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.199288256227758, |
|
"grad_norm": 3.7184265000947803, |
|
"learning_rate": 3.4602324199842026e-06, |
|
"loss": 0.0829, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.201067615658363, |
|
"grad_norm": 3.8248528703707705, |
|
"learning_rate": 3.446942563514711e-06, |
|
"loss": 0.0791, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.2028469750889679, |
|
"grad_norm": 4.304529083962577, |
|
"learning_rate": 3.4336648396585777e-06, |
|
"loss": 0.0795, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.204626334519573, |
|
"grad_norm": 3.390641110010474, |
|
"learning_rate": 3.4203993521424774e-06, |
|
"loss": 0.0936, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.206405693950178, |
|
"grad_norm": 3.4500238665906897, |
|
"learning_rate": 3.407146204597499e-06, |
|
"loss": 0.0896, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.208185053380783, |
|
"grad_norm": 3.2829261340015234, |
|
"learning_rate": 3.3939055005583305e-06, |
|
"loss": 0.0723, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.209964412811388, |
|
"grad_norm": 3.655789355929783, |
|
"learning_rate": 3.3806773434624475e-06, |
|
"loss": 0.0905, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.2117437722419928, |
|
"grad_norm": 3.9485701058640483, |
|
"learning_rate": 3.3674618366493117e-06, |
|
"loss": 0.125, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.2135231316725978, |
|
"grad_norm": 5.482670916293208, |
|
"learning_rate": 3.3542590833595533e-06, |
|
"loss": 0.1115, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.2153024911032029, |
|
"grad_norm": 4.306623577716619, |
|
"learning_rate": 3.341069186734176e-06, |
|
"loss": 0.1286, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.217081850533808, |
|
"grad_norm": 3.108719305520008, |
|
"learning_rate": 3.3278922498137455e-06, |
|
"loss": 0.0738, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.2188612099644127, |
|
"grad_norm": 4.103979319604206, |
|
"learning_rate": 3.314728375537587e-06, |
|
"loss": 0.0919, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.2206405693950177, |
|
"grad_norm": 3.4245763358208112, |
|
"learning_rate": 3.3015776667429724e-06, |
|
"loss": 0.1269, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.2224199288256228, |
|
"grad_norm": 3.9243524241236227, |
|
"learning_rate": 3.2884402261643296e-06, |
|
"loss": 0.0721, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.2241992882562278, |
|
"grad_norm": 3.5400636683234086, |
|
"learning_rate": 3.2753161564324344e-06, |
|
"loss": 0.0763, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.2259786476868326, |
|
"grad_norm": 4.2599692214964815, |
|
"learning_rate": 3.262205560073605e-06, |
|
"loss": 0.0777, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.2277580071174377, |
|
"grad_norm": 2.9934795463045254, |
|
"learning_rate": 3.249108539508909e-06, |
|
"loss": 0.06, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.2295373665480427, |
|
"grad_norm": 4.151000720248405, |
|
"learning_rate": 3.2360251970533527e-06, |
|
"loss": 0.0984, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.2313167259786477, |
|
"grad_norm": 3.3401036678045424, |
|
"learning_rate": 3.2229556349150947e-06, |
|
"loss": 0.0812, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.2330960854092528, |
|
"grad_norm": 3.367806273051442, |
|
"learning_rate": 3.2098999551946337e-06, |
|
"loss": 0.0881, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.2348754448398576, |
|
"grad_norm": 4.044762247407665, |
|
"learning_rate": 3.1968582598840234e-06, |
|
"loss": 0.0986, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.2366548042704626, |
|
"grad_norm": 4.331594767046947, |
|
"learning_rate": 3.183830650866068e-06, |
|
"loss": 0.106, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.2384341637010676, |
|
"grad_norm": 3.4390942446374138, |
|
"learning_rate": 3.1708172299135266e-06, |
|
"loss": 0.0786, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.2402135231316727, |
|
"grad_norm": 4.384015423680779, |
|
"learning_rate": 3.1578180986883234e-06, |
|
"loss": 0.1002, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.2419928825622777, |
|
"grad_norm": 3.431627400756195, |
|
"learning_rate": 3.1448333587407486e-06, |
|
"loss": 0.0897, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.2437722419928825, |
|
"grad_norm": 2.953664505987484, |
|
"learning_rate": 3.131863111508667e-06, |
|
"loss": 0.0835, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.2455516014234875, |
|
"grad_norm": 3.1308684341287565, |
|
"learning_rate": 3.118907458316722e-06, |
|
"loss": 0.0737, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.2473309608540926, |
|
"grad_norm": 3.7567639590230195, |
|
"learning_rate": 3.105966500375551e-06, |
|
"loss": 0.0853, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.2491103202846976, |
|
"grad_norm": 3.8737613048144928, |
|
"learning_rate": 3.0930403387809892e-06, |
|
"loss": 0.1171, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.2508896797153026, |
|
"grad_norm": 3.3925312372822747, |
|
"learning_rate": 3.080129074513285e-06, |
|
"loss": 0.0937, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.2526690391459074, |
|
"grad_norm": 4.258282083594837, |
|
"learning_rate": 3.067232808436299e-06, |
|
"loss": 0.1027, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.2544483985765125, |
|
"grad_norm": 4.081876716967663, |
|
"learning_rate": 3.0543516412967327e-06, |
|
"loss": 0.1211, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.2562277580071175, |
|
"grad_norm": 3.4346561793227566, |
|
"learning_rate": 3.041485673723331e-06, |
|
"loss": 0.1012, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.2580071174377223, |
|
"grad_norm": 4.1532000070351325, |
|
"learning_rate": 3.0286350062261017e-06, |
|
"loss": 0.0948, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.2597864768683273, |
|
"grad_norm": 3.808392764206537, |
|
"learning_rate": 3.0157997391955172e-06, |
|
"loss": 0.0841, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.2615658362989324, |
|
"grad_norm": 4.634849650373259, |
|
"learning_rate": 3.0029799729017518e-06, |
|
"loss": 0.1156, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.2633451957295374, |
|
"grad_norm": 3.498575245755138, |
|
"learning_rate": 2.9901758074938797e-06, |
|
"loss": 0.117, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.2651245551601424, |
|
"grad_norm": 3.3049994394465814, |
|
"learning_rate": 2.977387342999103e-06, |
|
"loss": 0.0745, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.2669039145907472, |
|
"grad_norm": 3.781046056201895, |
|
"learning_rate": 2.964614679321966e-06, |
|
"loss": 0.0898, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.2686832740213523, |
|
"grad_norm": 3.3617443335794412, |
|
"learning_rate": 2.951857916243574e-06, |
|
"loss": 0.0908, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.2704626334519573, |
|
"grad_norm": 4.27800390234936, |
|
"learning_rate": 2.9391171534208185e-06, |
|
"loss": 0.1238, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.2722419928825623, |
|
"grad_norm": 3.9820803068726525, |
|
"learning_rate": 2.9263924903855932e-06, |
|
"loss": 0.069, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.2740213523131674, |
|
"grad_norm": 3.13406386994981, |
|
"learning_rate": 2.9136840265440213e-06, |
|
"loss": 0.0766, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.2758007117437722, |
|
"grad_norm": 3.5977086928206177, |
|
"learning_rate": 2.9009918611756732e-06, |
|
"loss": 0.0842, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.2775800711743772, |
|
"grad_norm": 4.143615037233167, |
|
"learning_rate": 2.8883160934327968e-06, |
|
"loss": 0.0907, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.2793594306049823, |
|
"grad_norm": 4.334618013265256, |
|
"learning_rate": 2.8756568223395396e-06, |
|
"loss": 0.1012, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.281138790035587, |
|
"grad_norm": 3.231578882198494, |
|
"learning_rate": 2.8630141467911777e-06, |
|
"loss": 0.0786, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.282918149466192, |
|
"grad_norm": 3.0863411843841044, |
|
"learning_rate": 2.8503881655533395e-06, |
|
"loss": 0.0627, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.2846975088967971, |
|
"grad_norm": 2.6906650522121693, |
|
"learning_rate": 2.837778977261235e-06, |
|
"loss": 0.0745, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.2864768683274022, |
|
"grad_norm": 4.4930540532010825, |
|
"learning_rate": 2.8251866804188875e-06, |
|
"loss": 0.1191, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.2882562277580072, |
|
"grad_norm": 3.768654076606709, |
|
"learning_rate": 2.812611373398365e-06, |
|
"loss": 0.1272, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.290035587188612, |
|
"grad_norm": 3.7340001145062565, |
|
"learning_rate": 2.8000531544390064e-06, |
|
"loss": 0.1024, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.291814946619217, |
|
"grad_norm": 3.706727609342372, |
|
"learning_rate": 2.7875121216466595e-06, |
|
"loss": 0.1034, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.293594306049822, |
|
"grad_norm": 4.035320957370683, |
|
"learning_rate": 2.7749883729929105e-06, |
|
"loss": 0.101, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.295373665480427, |
|
"grad_norm": 3.581062646314127, |
|
"learning_rate": 2.762482006314324e-06, |
|
"loss": 0.077, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.2971530249110321, |
|
"grad_norm": 3.6142850482166637, |
|
"learning_rate": 2.7499931193116692e-06, |
|
"loss": 0.0872, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.298932384341637, |
|
"grad_norm": 3.48089199804994, |
|
"learning_rate": 2.737521809549167e-06, |
|
"loss": 0.0978, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.300711743772242, |
|
"grad_norm": 3.0665401715644323, |
|
"learning_rate": 2.725068174453722e-06, |
|
"loss": 0.077, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.302491103202847, |
|
"grad_norm": 3.206171569889489, |
|
"learning_rate": 2.712632311314165e-06, |
|
"loss": 0.0807, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.304270462633452, |
|
"grad_norm": 4.781504705905053, |
|
"learning_rate": 2.7002143172804875e-06, |
|
"loss": 0.1124, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.306049822064057, |
|
"grad_norm": 3.629016316981091, |
|
"learning_rate": 2.6878142893630904e-06, |
|
"loss": 0.1062, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.3078291814946619, |
|
"grad_norm": 3.6393291652532254, |
|
"learning_rate": 2.6754323244320154e-06, |
|
"loss": 0.0929, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.309608540925267, |
|
"grad_norm": 3.321438831129242, |
|
"learning_rate": 2.6630685192161995e-06, |
|
"loss": 0.0945, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.311387900355872, |
|
"grad_norm": 3.705228822484712, |
|
"learning_rate": 2.650722970302714e-06, |
|
"loss": 0.0967, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.3131672597864767, |
|
"grad_norm": 4.224247720426908, |
|
"learning_rate": 2.638395774136009e-06, |
|
"loss": 0.1034, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.3149466192170818, |
|
"grad_norm": 3.318141702021124, |
|
"learning_rate": 2.6260870270171645e-06, |
|
"loss": 0.0896, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.3167259786476868, |
|
"grad_norm": 4.802122663962688, |
|
"learning_rate": 2.613796825103129e-06, |
|
"loss": 0.1353, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.3185053380782918, |
|
"grad_norm": 2.966459876839839, |
|
"learning_rate": 2.60152526440598e-06, |
|
"loss": 0.0576, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.3202846975088969, |
|
"grad_norm": 3.439121873429226, |
|
"learning_rate": 2.5892724407921667e-06, |
|
"loss": 0.076, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.3220640569395017, |
|
"grad_norm": 4.899388838873401, |
|
"learning_rate": 2.577038449981763e-06, |
|
"loss": 0.0933, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.3238434163701067, |
|
"grad_norm": 3.4880522206731186, |
|
"learning_rate": 2.564823387547716e-06, |
|
"loss": 0.0899, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.3256227758007118, |
|
"grad_norm": 3.9816691194580014, |
|
"learning_rate": 2.552627348915106e-06, |
|
"loss": 0.1121, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.3274021352313168, |
|
"grad_norm": 3.494991621485606, |
|
"learning_rate": 2.5404504293603983e-06, |
|
"loss": 0.0855, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.3291814946619218, |
|
"grad_norm": 4.519400571465645, |
|
"learning_rate": 2.528292724010697e-06, |
|
"loss": 0.1382, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.3309608540925266, |
|
"grad_norm": 2.9249937817557043, |
|
"learning_rate": 2.5161543278430055e-06, |
|
"loss": 0.0841, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.3327402135231317, |
|
"grad_norm": 3.593772757717091, |
|
"learning_rate": 2.5040353356834756e-06, |
|
"loss": 0.0652, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.3345195729537367, |
|
"grad_norm": 3.2613951894554156, |
|
"learning_rate": 2.4919358422066816e-06, |
|
"loss": 0.0781, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.3362989323843417, |
|
"grad_norm": 4.581591976618446, |
|
"learning_rate": 2.4798559419348672e-06, |
|
"loss": 0.0912, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.3380782918149468, |
|
"grad_norm": 3.059279706683268, |
|
"learning_rate": 2.4677957292372166e-06, |
|
"loss": 0.0813, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.3398576512455516, |
|
"grad_norm": 3.2084839618124725, |
|
"learning_rate": 2.455755298329107e-06, |
|
"loss": 0.096, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.3416370106761566, |
|
"grad_norm": 3.2052679488413256, |
|
"learning_rate": 2.4437347432713838e-06, |
|
"loss": 0.0695, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.3434163701067616, |
|
"grad_norm": 3.7398039923420265, |
|
"learning_rate": 2.431734157969619e-06, |
|
"loss": 0.091, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.3451957295373664, |
|
"grad_norm": 3.6997409351449444, |
|
"learning_rate": 2.4197536361733792e-06, |
|
"loss": 0.1438, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.3469750889679715, |
|
"grad_norm": 4.097427432559169, |
|
"learning_rate": 2.407793271475495e-06, |
|
"loss": 0.1186, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.3487544483985765, |
|
"grad_norm": 3.697584886343356, |
|
"learning_rate": 2.3958531573113223e-06, |
|
"loss": 0.1216, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.3505338078291815, |
|
"grad_norm": 3.4099269928803295, |
|
"learning_rate": 2.3839333869580243e-06, |
|
"loss": 0.0738, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.3523131672597866, |
|
"grad_norm": 4.049023200573077, |
|
"learning_rate": 2.372034053533835e-06, |
|
"loss": 0.0652, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.3540925266903914, |
|
"grad_norm": 3.6390767626293856, |
|
"learning_rate": 2.360155249997334e-06, |
|
"loss": 0.1168, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.3558718861209964, |
|
"grad_norm": 3.6821832282680846, |
|
"learning_rate": 2.348297069146715e-06, |
|
"loss": 0.0815, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.3576512455516014, |
|
"grad_norm": 2.9815779761463443, |
|
"learning_rate": 2.3364596036190706e-06, |
|
"loss": 0.0578, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.3594306049822065, |
|
"grad_norm": 3.4246864300936997, |
|
"learning_rate": 2.3246429458896637e-06, |
|
"loss": 0.0889, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.3612099644128115, |
|
"grad_norm": 3.5642960365753527, |
|
"learning_rate": 2.312847188271203e-06, |
|
"loss": 0.0964, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.3629893238434163, |
|
"grad_norm": 5.7404774407390615, |
|
"learning_rate": 2.301072422913123e-06, |
|
"loss": 0.0941, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.3647686832740213, |
|
"grad_norm": 3.676868173435702, |
|
"learning_rate": 2.2893187418008666e-06, |
|
"loss": 0.1039, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.3665480427046264, |
|
"grad_norm": 2.974677852619648, |
|
"learning_rate": 2.2775862367551642e-06, |
|
"loss": 0.0706, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.3683274021352312, |
|
"grad_norm": 4.062174240688957, |
|
"learning_rate": 2.265874999431318e-06, |
|
"loss": 0.0603, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.3701067615658362, |
|
"grad_norm": 3.871833044842942, |
|
"learning_rate": 2.254185121318484e-06, |
|
"loss": 0.0807, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.3718861209964412, |
|
"grad_norm": 4.100985071515056, |
|
"learning_rate": 2.2425166937389596e-06, |
|
"loss": 0.1056, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.3736654804270463, |
|
"grad_norm": 2.9909493030720804, |
|
"learning_rate": 2.2308698078474645e-06, |
|
"loss": 0.0838, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.3754448398576513, |
|
"grad_norm": 3.0517553437756533, |
|
"learning_rate": 2.219244554630438e-06, |
|
"loss": 0.0676, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.3772241992882561, |
|
"grad_norm": 3.5809339429996934, |
|
"learning_rate": 2.207641024905322e-06, |
|
"loss": 0.0973, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.3790035587188612, |
|
"grad_norm": 4.514926222818748, |
|
"learning_rate": 2.1960593093198508e-06, |
|
"loss": 0.0828, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.3807829181494662, |
|
"grad_norm": 2.968102964716029, |
|
"learning_rate": 2.184499498351347e-06, |
|
"loss": 0.0661, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.3825622775800712, |
|
"grad_norm": 3.3639423275436564, |
|
"learning_rate": 2.172961682306011e-06, |
|
"loss": 0.0964, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.3843416370106763, |
|
"grad_norm": 3.527070744765392, |
|
"learning_rate": 2.1614459513182173e-06, |
|
"loss": 0.0737, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.386120996441281, |
|
"grad_norm": 3.915547122122255, |
|
"learning_rate": 2.149952395349813e-06, |
|
"loss": 0.0855, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.387900355871886, |
|
"grad_norm": 4.250509288941953, |
|
"learning_rate": 2.1384811041894055e-06, |
|
"loss": 0.0937, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.3896797153024911, |
|
"grad_norm": 4.484012098307333, |
|
"learning_rate": 2.1270321674516736e-06, |
|
"loss": 0.0845, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.3914590747330962, |
|
"grad_norm": 4.00649709119714, |
|
"learning_rate": 2.1156056745766593e-06, |
|
"loss": 0.0643, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.3932384341637012, |
|
"grad_norm": 3.2275938313332966, |
|
"learning_rate": 2.104201714829074e-06, |
|
"loss": 0.088, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.395017793594306, |
|
"grad_norm": 4.149051302633403, |
|
"learning_rate": 2.0928203772975917e-06, |
|
"loss": 0.1031, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.396797153024911, |
|
"grad_norm": 3.3951917885688037, |
|
"learning_rate": 2.081461750894166e-06, |
|
"loss": 0.0747, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.398576512455516, |
|
"grad_norm": 3.188676150308672, |
|
"learning_rate": 2.070125924353328e-06, |
|
"loss": 0.0617, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.4003558718861209, |
|
"grad_norm": 3.3038997419977205, |
|
"learning_rate": 2.058812986231493e-06, |
|
"loss": 0.0801, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.402135231316726, |
|
"grad_norm": 4.078026188873788, |
|
"learning_rate": 2.0475230249062727e-06, |
|
"loss": 0.1111, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.403914590747331, |
|
"grad_norm": 3.4476577180477586, |
|
"learning_rate": 2.0362561285757766e-06, |
|
"loss": 0.0861, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.405693950177936, |
|
"grad_norm": 3.853253440546021, |
|
"learning_rate": 2.0250123852579347e-06, |
|
"loss": 0.0725, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.407473309608541, |
|
"grad_norm": 2.812471467905264, |
|
"learning_rate": 2.013791882789801e-06, |
|
"loss": 0.0693, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.4092526690391458, |
|
"grad_norm": 3.6512473703284893, |
|
"learning_rate": 2.0025947088268714e-06, |
|
"loss": 0.0675, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.4110320284697508, |
|
"grad_norm": 3.095649897151838, |
|
"learning_rate": 1.9914209508423943e-06, |
|
"loss": 0.0677, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.4128113879003559, |
|
"grad_norm": 3.130149272504594, |
|
"learning_rate": 1.9802706961266936e-06, |
|
"loss": 0.075, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.414590747330961, |
|
"grad_norm": 3.987644214529634, |
|
"learning_rate": 1.969144031786483e-06, |
|
"loss": 0.1053, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.416370106761566, |
|
"grad_norm": 3.4417807064270947, |
|
"learning_rate": 1.958041044744186e-06, |
|
"loss": 0.0873, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.4181494661921707, |
|
"grad_norm": 3.6335737313496113, |
|
"learning_rate": 1.94696182173726e-06, |
|
"loss": 0.0721, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.4199288256227758, |
|
"grad_norm": 3.482670601521907, |
|
"learning_rate": 1.9359064493175077e-06, |
|
"loss": 0.1062, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.4217081850533808, |
|
"grad_norm": 2.6247260611415064, |
|
"learning_rate": 1.9248750138504176e-06, |
|
"loss": 0.0566, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.4234875444839858, |
|
"grad_norm": 3.581655875301917, |
|
"learning_rate": 1.9138676015144765e-06, |
|
"loss": 0.1014, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.4234875444839858, |
|
"eval_loss": 0.23001189529895782, |
|
"eval_runtime": 1.5685, |
|
"eval_samples_per_second": 29.328, |
|
"eval_steps_per_second": 7.651, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.4252669039145909, |
|
"grad_norm": 3.9022798832478527, |
|
"learning_rate": 1.9028842983005036e-06, |
|
"loss": 0.1121, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.4270462633451957, |
|
"grad_norm": 4.214156738637564, |
|
"learning_rate": 1.8919251900109697e-06, |
|
"loss": 0.1287, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.4288256227758007, |
|
"grad_norm": 2.9228481398415385, |
|
"learning_rate": 1.8809903622593395e-06, |
|
"loss": 0.0721, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.4306049822064058, |
|
"grad_norm": 3.328032777679221, |
|
"learning_rate": 1.870079900469392e-06, |
|
"loss": 0.0877, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.4323843416370106, |
|
"grad_norm": 3.51896837771515, |
|
"learning_rate": 1.8591938898745593e-06, |
|
"loss": 0.0965, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.4341637010676156, |
|
"grad_norm": 3.748542112977985, |
|
"learning_rate": 1.8483324155172594e-06, |
|
"loss": 0.081, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.4359430604982206, |
|
"grad_norm": 3.1798970885916544, |
|
"learning_rate": 1.837495562248226e-06, |
|
"loss": 0.095, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.4377224199288257, |
|
"grad_norm": 3.4221992943695922, |
|
"learning_rate": 1.8266834147258577e-06, |
|
"loss": 0.0751, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.4395017793594307, |
|
"grad_norm": 4.051665563325545, |
|
"learning_rate": 1.8158960574155455e-06, |
|
"loss": 0.1231, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.4412811387900355, |
|
"grad_norm": 3.1912821119380204, |
|
"learning_rate": 1.8051335745890196e-06, |
|
"loss": 0.0679, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.4430604982206405, |
|
"grad_norm": 3.3788770882866355, |
|
"learning_rate": 1.7943960503236856e-06, |
|
"loss": 0.1007, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.4448398576512456, |
|
"grad_norm": 3.219781484439294, |
|
"learning_rate": 1.7836835685019732e-06, |
|
"loss": 0.0869, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.4466192170818506, |
|
"grad_norm": 4.704884713202341, |
|
"learning_rate": 1.7729962128106787e-06, |
|
"loss": 0.1007, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.4483985765124556, |
|
"grad_norm": 3.558593157347857, |
|
"learning_rate": 1.7623340667403089e-06, |
|
"loss": 0.0801, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.4501779359430604, |
|
"grad_norm": 4.323785861947054, |
|
"learning_rate": 1.7516972135844352e-06, |
|
"loss": 0.0755, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.4519572953736655, |
|
"grad_norm": 3.624959257787092, |
|
"learning_rate": 1.741085736439031e-06, |
|
"loss": 0.0737, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.4537366548042705, |
|
"grad_norm": 3.8904894909059515, |
|
"learning_rate": 1.730499718201838e-06, |
|
"loss": 0.1011, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.4555160142348753, |
|
"grad_norm": 3.8260062070467615, |
|
"learning_rate": 1.7199392415717064e-06, |
|
"loss": 0.0904, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.4572953736654806, |
|
"grad_norm": 3.003839729247025, |
|
"learning_rate": 1.7094043890479557e-06, |
|
"loss": 0.0838, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.4590747330960854, |
|
"grad_norm": 3.4441255408488947, |
|
"learning_rate": 1.698895242929725e-06, |
|
"loss": 0.1016, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.4608540925266904, |
|
"grad_norm": 3.142536627106039, |
|
"learning_rate": 1.6884118853153358e-06, |
|
"loss": 0.0585, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.4626334519572954, |
|
"grad_norm": 4.030784024879671, |
|
"learning_rate": 1.6779543981016478e-06, |
|
"loss": 0.0905, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.4644128113879002, |
|
"grad_norm": 3.5197397989181556, |
|
"learning_rate": 1.6675228629834133e-06, |
|
"loss": 0.0878, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.4661921708185053, |
|
"grad_norm": 3.6401618996525014, |
|
"learning_rate": 1.657117361452651e-06, |
|
"loss": 0.0638, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.4679715302491103, |
|
"grad_norm": 2.904413383311939, |
|
"learning_rate": 1.6467379747980011e-06, |
|
"loss": 0.0848, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.4697508896797153, |
|
"grad_norm": 3.204891201338953, |
|
"learning_rate": 1.6363847841040914e-06, |
|
"loss": 0.0746, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.4715302491103204, |
|
"grad_norm": 3.0116302956130805, |
|
"learning_rate": 1.626057870250906e-06, |
|
"loss": 0.0994, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.4733096085409252, |
|
"grad_norm": 3.7194641140288316, |
|
"learning_rate": 1.6157573139131527e-06, |
|
"loss": 0.0702, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.4750889679715302, |
|
"grad_norm": 2.9040577626952393, |
|
"learning_rate": 1.605483195559628e-06, |
|
"loss": 0.0966, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.4768683274021353, |
|
"grad_norm": 3.424792964314128, |
|
"learning_rate": 1.5952355954525966e-06, |
|
"loss": 0.0721, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.4786476868327403, |
|
"grad_norm": 3.2518839267678596, |
|
"learning_rate": 1.5850145936471607e-06, |
|
"loss": 0.076, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.4804270462633453, |
|
"grad_norm": 4.362009404504319, |
|
"learning_rate": 1.5748202699906335e-06, |
|
"loss": 0.0731, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.4822064056939501, |
|
"grad_norm": 3.5042377991707854, |
|
"learning_rate": 1.5646527041219128e-06, |
|
"loss": 0.0728, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.4839857651245552, |
|
"grad_norm": 3.4570397907771007, |
|
"learning_rate": 1.5545119754708682e-06, |
|
"loss": 0.0817, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.4857651245551602, |
|
"grad_norm": 3.807884074834835, |
|
"learning_rate": 1.544398163257711e-06, |
|
"loss": 0.1269, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.487544483985765, |
|
"grad_norm": 3.637991734158939, |
|
"learning_rate": 1.5343113464923808e-06, |
|
"loss": 0.1208, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.48932384341637, |
|
"grad_norm": 2.6629245403935933, |
|
"learning_rate": 1.524251603973927e-06, |
|
"loss": 0.0709, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.491103202846975, |
|
"grad_norm": 3.2251116224276397, |
|
"learning_rate": 1.5142190142898883e-06, |
|
"loss": 0.0744, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.49288256227758, |
|
"grad_norm": 3.504488960988416, |
|
"learning_rate": 1.5042136558156883e-06, |
|
"loss": 0.0823, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.4946619217081851, |
|
"grad_norm": 3.5815452811835606, |
|
"learning_rate": 1.4942356067140162e-06, |
|
"loss": 0.0823, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.49644128113879, |
|
"grad_norm": 2.9378328622685395, |
|
"learning_rate": 1.4842849449342195e-06, |
|
"loss": 0.0727, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.498220640569395, |
|
"grad_norm": 3.7895679138893112, |
|
"learning_rate": 1.4743617482116896e-06, |
|
"loss": 0.0894, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 3.3359327588602268, |
|
"learning_rate": 1.4644660940672628e-06, |
|
"loss": 0.0665, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.501779359430605, |
|
"grad_norm": 3.5907327810560687, |
|
"learning_rate": 1.454598059806609e-06, |
|
"loss": 0.0962, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.50355871886121, |
|
"grad_norm": 4.412671445716315, |
|
"learning_rate": 1.4447577225196296e-06, |
|
"loss": 0.1012, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.5053380782918149, |
|
"grad_norm": 3.6547384604072013, |
|
"learning_rate": 1.4349451590798564e-06, |
|
"loss": 0.1084, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.50711743772242, |
|
"grad_norm": 3.868488095099105, |
|
"learning_rate": 1.4251604461438444e-06, |
|
"loss": 0.1275, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.508896797153025, |
|
"grad_norm": 3.296042997828009, |
|
"learning_rate": 1.4154036601505834e-06, |
|
"loss": 0.0909, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.5106761565836297, |
|
"grad_norm": 3.7096125262294763, |
|
"learning_rate": 1.4056748773208933e-06, |
|
"loss": 0.1028, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.512455516014235, |
|
"grad_norm": 4.199839749264668, |
|
"learning_rate": 1.3959741736568339e-06, |
|
"loss": 0.1188, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.5142348754448398, |
|
"grad_norm": 3.079515146068007, |
|
"learning_rate": 1.3863016249411027e-06, |
|
"loss": 0.0694, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.5160142348754448, |
|
"grad_norm": 3.4144649023149283, |
|
"learning_rate": 1.376657306736453e-06, |
|
"loss": 0.0751, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.5177935943060499, |
|
"grad_norm": 3.9660611633276557, |
|
"learning_rate": 1.3670412943850975e-06, |
|
"loss": 0.0964, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.5195729537366547, |
|
"grad_norm": 4.512699606603503, |
|
"learning_rate": 1.3574536630081208e-06, |
|
"loss": 0.0933, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.52135231316726, |
|
"grad_norm": 3.5653914617322413, |
|
"learning_rate": 1.347894487504896e-06, |
|
"loss": 0.0913, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.5231316725978647, |
|
"grad_norm": 5.993954121330623, |
|
"learning_rate": 1.3383638425524909e-06, |
|
"loss": 0.0755, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.5249110320284698, |
|
"grad_norm": 3.041624892690082, |
|
"learning_rate": 1.3288618026050943e-06, |
|
"loss": 0.0761, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.5266903914590748, |
|
"grad_norm": 3.8853351352002536, |
|
"learning_rate": 1.31938844189343e-06, |
|
"loss": 0.0967, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.5284697508896796, |
|
"grad_norm": 3.544861954668673, |
|
"learning_rate": 1.3099438344241777e-06, |
|
"loss": 0.0937, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.5302491103202847, |
|
"grad_norm": 3.4971477616781197, |
|
"learning_rate": 1.3005280539793908e-06, |
|
"loss": 0.0802, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.5320284697508897, |
|
"grad_norm": 3.1895950129812283, |
|
"learning_rate": 1.2911411741159273e-06, |
|
"loss": 0.0694, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.5338078291814945, |
|
"grad_norm": 3.282253314783813, |
|
"learning_rate": 1.2817832681648712e-06, |
|
"loss": 0.1013, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.5355871886120998, |
|
"grad_norm": 4.0084212144048745, |
|
"learning_rate": 1.2724544092309581e-06, |
|
"loss": 0.1013, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.5373665480427046, |
|
"grad_norm": 3.415885730335747, |
|
"learning_rate": 1.2631546701920073e-06, |
|
"loss": 0.0909, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.5391459074733096, |
|
"grad_norm": 3.349488218112524, |
|
"learning_rate": 1.2538841236983519e-06, |
|
"loss": 0.086, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.5409252669039146, |
|
"grad_norm": 3.658181939367356, |
|
"learning_rate": 1.244642842172266e-06, |
|
"loss": 0.0646, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.5427046263345194, |
|
"grad_norm": 3.3695857939452076, |
|
"learning_rate": 1.2354308978074088e-06, |
|
"loss": 0.0818, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.5444839857651247, |
|
"grad_norm": 3.795429713297916, |
|
"learning_rate": 1.2262483625682514e-06, |
|
"loss": 0.1123, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.5462633451957295, |
|
"grad_norm": 4.1932972672206805, |
|
"learning_rate": 1.2170953081895214e-06, |
|
"loss": 0.076, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.5480427046263345, |
|
"grad_norm": 3.3360706689373876, |
|
"learning_rate": 1.2079718061756369e-06, |
|
"loss": 0.0814, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.5498220640569396, |
|
"grad_norm": 3.2165098388897664, |
|
"learning_rate": 1.1988779278001517e-06, |
|
"loss": 0.0803, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.5516014234875444, |
|
"grad_norm": 3.8858321086484016, |
|
"learning_rate": 1.1898137441051982e-06, |
|
"loss": 0.1101, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.5533807829181496, |
|
"grad_norm": 2.9513697748964947, |
|
"learning_rate": 1.1807793259009282e-06, |
|
"loss": 0.0793, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.5551601423487544, |
|
"grad_norm": 2.9673948718920533, |
|
"learning_rate": 1.1717747437649657e-06, |
|
"loss": 0.0661, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.5569395017793595, |
|
"grad_norm": 3.444456203531531, |
|
"learning_rate": 1.1628000680418533e-06, |
|
"loss": 0.0638, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.5587188612099645, |
|
"grad_norm": 3.7339099983918995, |
|
"learning_rate": 1.1538553688425002e-06, |
|
"loss": 0.0647, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.5604982206405693, |
|
"grad_norm": 3.5853644200882178, |
|
"learning_rate": 1.14494071604364e-06, |
|
"loss": 0.1133, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.5622775800711743, |
|
"grad_norm": 3.983871128530315, |
|
"learning_rate": 1.1360561792872754e-06, |
|
"loss": 0.0904, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.5640569395017794, |
|
"grad_norm": 4.533343461847132, |
|
"learning_rate": 1.127201827980145e-06, |
|
"loss": 0.0991, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.5658362989323842, |
|
"grad_norm": 3.336125979305043, |
|
"learning_rate": 1.1183777312931748e-06, |
|
"loss": 0.0911, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.5676156583629894, |
|
"grad_norm": 2.89718706185389, |
|
"learning_rate": 1.1095839581609407e-06, |
|
"loss": 0.0779, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.5693950177935942, |
|
"grad_norm": 4.164023340477133, |
|
"learning_rate": 1.1008205772811248e-06, |
|
"loss": 0.117, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.5711743772241993, |
|
"grad_norm": 3.1318518687796244, |
|
"learning_rate": 1.0920876571139843e-06, |
|
"loss": 0.0896, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.5729537366548043, |
|
"grad_norm": 3.7179282259194504, |
|
"learning_rate": 1.0833852658818167e-06, |
|
"loss": 0.0882, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.5747330960854091, |
|
"grad_norm": 3.0991123929718025, |
|
"learning_rate": 1.0747134715684221e-06, |
|
"loss": 0.0843, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.5765124555160144, |
|
"grad_norm": 4.258111679998532, |
|
"learning_rate": 1.0660723419185776e-06, |
|
"loss": 0.1276, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.5782918149466192, |
|
"grad_norm": 3.285260557216569, |
|
"learning_rate": 1.0574619444375017e-06, |
|
"loss": 0.0753, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.5800711743772242, |
|
"grad_norm": 3.1614444635660353, |
|
"learning_rate": 1.0488823463903341e-06, |
|
"loss": 0.0913, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.5818505338078293, |
|
"grad_norm": 4.03935789922729, |
|
"learning_rate": 1.0403336148016053e-06, |
|
"loss": 0.1018, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.583629893238434, |
|
"grad_norm": 3.3548767081122763, |
|
"learning_rate": 1.0318158164547159e-06, |
|
"loss": 0.1099, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.585409252669039, |
|
"grad_norm": 3.929018297990129, |
|
"learning_rate": 1.0233290178914096e-06, |
|
"loss": 0.0847, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.5871886120996441, |
|
"grad_norm": 3.0497492640898236, |
|
"learning_rate": 1.014873285411262e-06, |
|
"loss": 0.0784, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.5889679715302492, |
|
"grad_norm": 3.7060260586138782, |
|
"learning_rate": 1.006448685071154e-06, |
|
"loss": 0.0973, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.5907473309608542, |
|
"grad_norm": 4.212768673769087, |
|
"learning_rate": 9.980552826847635e-07, |
|
"loss": 0.0966, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.592526690391459, |
|
"grad_norm": 4.1187013728072515, |
|
"learning_rate": 9.896931438220453e-07, |
|
"loss": 0.1399, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.594306049822064, |
|
"grad_norm": 3.488701821131246, |
|
"learning_rate": 9.813623338087181e-07, |
|
"loss": 0.0777, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.596085409252669, |
|
"grad_norm": 3.863957019215027, |
|
"learning_rate": 9.730629177257623e-07, |
|
"loss": 0.0994, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.5978647686832739, |
|
"grad_norm": 3.6144157800715018, |
|
"learning_rate": 9.64794960408903e-07, |
|
"loss": 0.0942, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.5996441281138791, |
|
"grad_norm": 4.702628715820047, |
|
"learning_rate": 9.565585264481092e-07, |
|
"loss": 0.092, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.601423487544484, |
|
"grad_norm": 3.0342143065036593, |
|
"learning_rate": 9.483536801870835e-07, |
|
"loss": 0.0696, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.603202846975089, |
|
"grad_norm": 2.881729192505434, |
|
"learning_rate": 9.401804857227648e-07, |
|
"loss": 0.0602, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 1.604982206405694, |
|
"grad_norm": 3.2548970936861124, |
|
"learning_rate": 9.320390069048258e-07, |
|
"loss": 0.0947, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 1.6067615658362988, |
|
"grad_norm": 3.2143170150633114, |
|
"learning_rate": 9.239293073351735e-07, |
|
"loss": 0.0588, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 1.608540925266904, |
|
"grad_norm": 3.640735293463136, |
|
"learning_rate": 9.158514503674543e-07, |
|
"loss": 0.0719, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 1.6103202846975089, |
|
"grad_norm": 3.039640895091384, |
|
"learning_rate": 9.078054991065532e-07, |
|
"loss": 0.0781, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.612099644128114, |
|
"grad_norm": 3.346582488821744, |
|
"learning_rate": 8.997915164081095e-07, |
|
"loss": 0.0856, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 1.613879003558719, |
|
"grad_norm": 4.425384853855579, |
|
"learning_rate": 8.918095648780195e-07, |
|
"loss": 0.0761, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 1.6156583629893237, |
|
"grad_norm": 3.8872847881180346, |
|
"learning_rate": 8.838597068719518e-07, |
|
"loss": 0.0903, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 1.6174377224199288, |
|
"grad_norm": 3.8105064817405867, |
|
"learning_rate": 8.75942004494853e-07, |
|
"loss": 0.0885, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 1.6192170818505338, |
|
"grad_norm": 3.9765605921881497, |
|
"learning_rate": 8.680565196004704e-07, |
|
"loss": 0.1154, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.6209964412811388, |
|
"grad_norm": 2.8175622315761992, |
|
"learning_rate": 8.602033137908666e-07, |
|
"loss": 0.0607, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 1.6227758007117439, |
|
"grad_norm": 3.9515617448967966, |
|
"learning_rate": 8.523824484159348e-07, |
|
"loss": 0.0735, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 1.6245551601423487, |
|
"grad_norm": 3.279307464679309, |
|
"learning_rate": 8.445939845729245e-07, |
|
"loss": 0.0668, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 1.6263345195729537, |
|
"grad_norm": 2.517299910394412, |
|
"learning_rate": 8.368379831059592e-07, |
|
"loss": 0.059, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 1.6281138790035588, |
|
"grad_norm": 2.8632057562437185, |
|
"learning_rate": 8.29114504605566e-07, |
|
"loss": 0.0772, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.6298932384341636, |
|
"grad_norm": 3.0123833840254686, |
|
"learning_rate": 8.21423609408199e-07, |
|
"loss": 0.0782, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 1.6316725978647688, |
|
"grad_norm": 3.380596866710731, |
|
"learning_rate": 8.137653575957666e-07, |
|
"loss": 0.0813, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 1.6334519572953736, |
|
"grad_norm": 3.92957326528419, |
|
"learning_rate": 8.061398089951678e-07, |
|
"loss": 0.103, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 1.6352313167259787, |
|
"grad_norm": 3.30256150065065, |
|
"learning_rate": 7.985470231778203e-07, |
|
"loss": 0.0631, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 1.6370106761565837, |
|
"grad_norm": 4.226134359492851, |
|
"learning_rate": 7.909870594591951e-07, |
|
"loss": 0.0862, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.6387900355871885, |
|
"grad_norm": 4.011456420063763, |
|
"learning_rate": 7.834599768983553e-07, |
|
"loss": 0.0674, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 1.6405693950177938, |
|
"grad_norm": 3.4549371281292127, |
|
"learning_rate": 7.759658342974951e-07, |
|
"loss": 0.0935, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 1.6423487544483986, |
|
"grad_norm": 4.398141220447772, |
|
"learning_rate": 7.685046902014747e-07, |
|
"loss": 0.1064, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 1.6441281138790036, |
|
"grad_norm": 4.462254154570054, |
|
"learning_rate": 7.61076602897371e-07, |
|
"loss": 0.086, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 1.6459074733096086, |
|
"grad_norm": 2.829930236082496, |
|
"learning_rate": 7.536816304140177e-07, |
|
"loss": 0.0688, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.6476868327402134, |
|
"grad_norm": 3.54232632479297, |
|
"learning_rate": 7.46319830521553e-07, |
|
"loss": 0.0756, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 1.6494661921708185, |
|
"grad_norm": 2.6581906588514084, |
|
"learning_rate": 7.389912607309662e-07, |
|
"loss": 0.0685, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 1.6512455516014235, |
|
"grad_norm": 4.005745276151917, |
|
"learning_rate": 7.316959782936516e-07, |
|
"loss": 0.1139, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 1.6530249110320283, |
|
"grad_norm": 2.757895702312864, |
|
"learning_rate": 7.244340402009608e-07, |
|
"loss": 0.0855, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 1.6548042704626336, |
|
"grad_norm": 4.482023471070777, |
|
"learning_rate": 7.172055031837572e-07, |
|
"loss": 0.0892, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.6565836298932384, |
|
"grad_norm": 3.6669098510413543, |
|
"learning_rate": 7.100104237119676e-07, |
|
"loss": 0.0975, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 1.6583629893238434, |
|
"grad_norm": 2.990101255917499, |
|
"learning_rate": 7.028488579941506e-07, |
|
"loss": 0.0761, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 1.6601423487544484, |
|
"grad_norm": 3.214221629850718, |
|
"learning_rate": 6.957208619770505e-07, |
|
"loss": 0.0734, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 1.6619217081850532, |
|
"grad_norm": 4.018779340301804, |
|
"learning_rate": 6.886264913451635e-07, |
|
"loss": 0.0664, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 1.6637010676156585, |
|
"grad_norm": 3.264131237917317, |
|
"learning_rate": 6.815658015203014e-07, |
|
"loss": 0.0586, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.6654804270462633, |
|
"grad_norm": 5.8620192244636655, |
|
"learning_rate": 6.745388476611553e-07, |
|
"loss": 0.1052, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 1.6672597864768683, |
|
"grad_norm": 3.3852215236540966, |
|
"learning_rate": 6.67545684662873e-07, |
|
"loss": 0.0991, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 1.6690391459074734, |
|
"grad_norm": 3.0056247970951633, |
|
"learning_rate": 6.605863671566221e-07, |
|
"loss": 0.1024, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 1.6708185053380782, |
|
"grad_norm": 3.1398062340003277, |
|
"learning_rate": 6.536609495091695e-07, |
|
"loss": 0.0705, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 1.6725978647686834, |
|
"grad_norm": 4.065567968075945, |
|
"learning_rate": 6.467694858224488e-07, |
|
"loss": 0.0978, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.6743772241992882, |
|
"grad_norm": 4.354216448266336, |
|
"learning_rate": 6.399120299331468e-07, |
|
"loss": 0.0729, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 1.6761565836298933, |
|
"grad_norm": 3.109757569818724, |
|
"learning_rate": 6.330886354122768e-07, |
|
"loss": 0.0795, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 1.6779359430604983, |
|
"grad_norm": 3.0892969100910483, |
|
"learning_rate": 6.262993555647617e-07, |
|
"loss": 0.0886, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 1.6797153024911031, |
|
"grad_norm": 3.507471500470927, |
|
"learning_rate": 6.1954424342902e-07, |
|
"loss": 0.1024, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 1.6814946619217082, |
|
"grad_norm": 3.816093226672282, |
|
"learning_rate": 6.128233517765448e-07, |
|
"loss": 0.1338, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.6832740213523132, |
|
"grad_norm": 3.3054079807755414, |
|
"learning_rate": 6.061367331114992e-07, |
|
"loss": 0.0734, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 1.685053380782918, |
|
"grad_norm": 3.7910453289189925, |
|
"learning_rate": 5.994844396703025e-07, |
|
"loss": 0.1098, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 1.6868327402135233, |
|
"grad_norm": 3.236260651998301, |
|
"learning_rate": 5.928665234212233e-07, |
|
"loss": 0.094, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 1.688612099644128, |
|
"grad_norm": 3.21129158033526, |
|
"learning_rate": 5.862830360639698e-07, |
|
"loss": 0.0685, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 1.690391459074733, |
|
"grad_norm": 3.614487045958546, |
|
"learning_rate": 5.797340290292907e-07, |
|
"loss": 0.0948, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.6921708185053381, |
|
"grad_norm": 3.938856534305073, |
|
"learning_rate": 5.732195534785723e-07, |
|
"loss": 0.0932, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 1.693950177935943, |
|
"grad_norm": 3.0697703212373084, |
|
"learning_rate": 5.667396603034369e-07, |
|
"loss": 0.0534, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 1.6957295373665482, |
|
"grad_norm": 3.7535424245852687, |
|
"learning_rate": 5.602944001253486e-07, |
|
"loss": 0.0864, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 1.697508896797153, |
|
"grad_norm": 3.4433388322984286, |
|
"learning_rate": 5.538838232952104e-07, |
|
"loss": 0.0708, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 1.699288256227758, |
|
"grad_norm": 4.34787326945058, |
|
"learning_rate": 5.475079798929816e-07, |
|
"loss": 0.1173, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 1.701067615658363, |
|
"grad_norm": 3.889188567099238, |
|
"learning_rate": 5.411669197272795e-07, |
|
"loss": 0.0671, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 1.7028469750889679, |
|
"grad_norm": 3.4629760377974033, |
|
"learning_rate": 5.348606923349903e-07, |
|
"loss": 0.082, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 1.704626334519573, |
|
"grad_norm": 3.1477545863300285, |
|
"learning_rate": 5.285893469808855e-07, |
|
"loss": 0.056, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 1.706405693950178, |
|
"grad_norm": 3.3431356088432698, |
|
"learning_rate": 5.223529326572352e-07, |
|
"loss": 0.0737, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 1.708185053380783, |
|
"grad_norm": 3.9059407476460177, |
|
"learning_rate": 5.161514980834232e-07, |
|
"loss": 0.0787, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.709964412811388, |
|
"grad_norm": 3.232446862993289, |
|
"learning_rate": 5.099850917055709e-07, |
|
"loss": 0.0546, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 1.7117437722419928, |
|
"grad_norm": 3.8220513530317044, |
|
"learning_rate": 5.038537616961559e-07, |
|
"loss": 0.0994, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 1.7135231316725978, |
|
"grad_norm": 4.323239814461721, |
|
"learning_rate": 4.977575559536358e-07, |
|
"loss": 0.0636, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 1.7153024911032029, |
|
"grad_norm": 3.271615330414324, |
|
"learning_rate": 4.916965221020753e-07, |
|
"loss": 0.0784, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 1.7170818505338077, |
|
"grad_norm": 3.397649995666313, |
|
"learning_rate": 4.856707074907729e-07, |
|
"loss": 0.0914, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 1.718861209964413, |
|
"grad_norm": 3.4270157481146377, |
|
"learning_rate": 4.796801591938922e-07, |
|
"loss": 0.0962, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 1.7206405693950177, |
|
"grad_norm": 3.5270037665668625, |
|
"learning_rate": 4.737249240100911e-07, |
|
"loss": 0.0859, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 1.7224199288256228, |
|
"grad_norm": 3.924852263174778, |
|
"learning_rate": 4.6780504846216155e-07, |
|
"loss": 0.0702, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 1.7241992882562278, |
|
"grad_norm": 3.165501366270589, |
|
"learning_rate": 4.619205787966613e-07, |
|
"loss": 0.0518, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 1.7259786476868326, |
|
"grad_norm": 2.4599640032378995, |
|
"learning_rate": 4.560715609835548e-07, |
|
"loss": 0.0572, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.7277580071174379, |
|
"grad_norm": 3.06932360573698, |
|
"learning_rate": 4.5025804071585464e-07, |
|
"loss": 0.0735, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 1.7295373665480427, |
|
"grad_norm": 3.5807864618810683, |
|
"learning_rate": 4.4448006340926163e-07, |
|
"loss": 0.1028, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 1.7313167259786477, |
|
"grad_norm": 3.0540367340797285, |
|
"learning_rate": 4.3873767420181344e-07, |
|
"loss": 0.0607, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 1.7330960854092528, |
|
"grad_norm": 3.550717729183326, |
|
"learning_rate": 4.3303091795353024e-07, |
|
"loss": 0.1101, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 1.7348754448398576, |
|
"grad_norm": 3.3955622631138236, |
|
"learning_rate": 4.2735983924606596e-07, |
|
"loss": 0.0597, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.7366548042704626, |
|
"grad_norm": 3.7602464122299284, |
|
"learning_rate": 4.2172448238235464e-07, |
|
"loss": 0.061, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 1.7384341637010676, |
|
"grad_norm": 3.8428030158898574, |
|
"learning_rate": 4.161248913862731e-07, |
|
"loss": 0.0792, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 1.7402135231316724, |
|
"grad_norm": 3.194753700727526, |
|
"learning_rate": 4.1056111000228937e-07, |
|
"loss": 0.0638, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 1.7419928825622777, |
|
"grad_norm": 3.5545432543755386, |
|
"learning_rate": 4.0503318169512417e-07, |
|
"loss": 0.0849, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 1.7437722419928825, |
|
"grad_norm": 3.51590680939792, |
|
"learning_rate": 3.9954114964941336e-07, |
|
"loss": 0.0736, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.7455516014234875, |
|
"grad_norm": 3.4060331741520136, |
|
"learning_rate": 3.9408505676936327e-07, |
|
"loss": 0.0609, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 1.7473309608540926, |
|
"grad_norm": 4.32352501990899, |
|
"learning_rate": 3.886649456784253e-07, |
|
"loss": 0.1243, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 1.7491103202846974, |
|
"grad_norm": 3.639695034707267, |
|
"learning_rate": 3.8328085871895624e-07, |
|
"loss": 0.0919, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 1.7508896797153026, |
|
"grad_norm": 3.57659079593492, |
|
"learning_rate": 3.779328379518898e-07, |
|
"loss": 0.0858, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 1.7526690391459074, |
|
"grad_norm": 2.82789289851559, |
|
"learning_rate": 3.7262092515640556e-07, |
|
"loss": 0.0739, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 1.7544483985765125, |
|
"grad_norm": 3.193976401526913, |
|
"learning_rate": 3.673451618296081e-07, |
|
"loss": 0.0723, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 1.7562277580071175, |
|
"grad_norm": 3.8777656227654496, |
|
"learning_rate": 3.621055891861963e-07, |
|
"loss": 0.0962, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 1.7580071174377223, |
|
"grad_norm": 4.879184831294678, |
|
"learning_rate": 3.56902248158148e-07, |
|
"loss": 0.0877, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 1.7597864768683276, |
|
"grad_norm": 3.2399375300877313, |
|
"learning_rate": 3.517351793943913e-07, |
|
"loss": 0.0714, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 1.7615658362989324, |
|
"grad_norm": 4.329259034241467, |
|
"learning_rate": 3.4660442326049704e-07, |
|
"loss": 0.0637, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.7633451957295374, |
|
"grad_norm": 3.436433032894715, |
|
"learning_rate": 3.4151001983835696e-07, |
|
"loss": 0.1109, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 1.7651245551601424, |
|
"grad_norm": 3.8415843903891833, |
|
"learning_rate": 3.364520089258727e-07, |
|
"loss": 0.0967, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 1.7669039145907472, |
|
"grad_norm": 3.352369227034985, |
|
"learning_rate": 3.314304300366461e-07, |
|
"loss": 0.0997, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 1.7686832740213523, |
|
"grad_norm": 3.3689071716538406, |
|
"learning_rate": 3.2644532239966444e-07, |
|
"loss": 0.0773, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 1.7704626334519573, |
|
"grad_norm": 3.501901720431375, |
|
"learning_rate": 3.2149672495900286e-07, |
|
"loss": 0.0726, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 1.7722419928825621, |
|
"grad_norm": 3.852275438157264, |
|
"learning_rate": 3.165846763735153e-07, |
|
"loss": 0.1083, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 1.7740213523131674, |
|
"grad_norm": 3.501701582615609, |
|
"learning_rate": 3.117092150165324e-07, |
|
"loss": 0.1153, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 1.7758007117437722, |
|
"grad_norm": 3.4193213697003593, |
|
"learning_rate": 3.068703789755606e-07, |
|
"loss": 0.0886, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 1.7775800711743772, |
|
"grad_norm": 3.4413007236054325, |
|
"learning_rate": 3.020682060519886e-07, |
|
"loss": 0.0849, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 1.7793594306049823, |
|
"grad_norm": 3.723667706777461, |
|
"learning_rate": 2.9730273376078923e-07, |
|
"loss": 0.0942, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.7793594306049823, |
|
"eval_loss": 0.2254125326871872, |
|
"eval_runtime": 1.568, |
|
"eval_samples_per_second": 29.336, |
|
"eval_steps_per_second": 7.653, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.781138790035587, |
|
"grad_norm": 5.637509925105232, |
|
"learning_rate": 2.9257399933022737e-07, |
|
"loss": 0.0716, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 1.7829181494661923, |
|
"grad_norm": 3.5572413897295583, |
|
"learning_rate": 2.8788203970156805e-07, |
|
"loss": 0.0984, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 1.7846975088967971, |
|
"grad_norm": 3.2004144789535305, |
|
"learning_rate": 2.832268915287878e-07, |
|
"loss": 0.1053, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 1.7864768683274022, |
|
"grad_norm": 4.075278547203804, |
|
"learning_rate": 2.7860859117828985e-07, |
|
"loss": 0.1218, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 1.7882562277580072, |
|
"grad_norm": 5.307940997524957, |
|
"learning_rate": 2.740271747286194e-07, |
|
"loss": 0.1219, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 1.790035587188612, |
|
"grad_norm": 3.816111869619712, |
|
"learning_rate": 2.6948267797018145e-07, |
|
"loss": 0.0999, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 1.791814946619217, |
|
"grad_norm": 3.548697337432586, |
|
"learning_rate": 2.649751364049613e-07, |
|
"loss": 0.0646, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 1.793594306049822, |
|
"grad_norm": 3.2252768514442156, |
|
"learning_rate": 2.6050458524624735e-07, |
|
"loss": 0.0807, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 1.795373665480427, |
|
"grad_norm": 3.543340268545346, |
|
"learning_rate": 2.560710594183552e-07, |
|
"loss": 0.07, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 1.7971530249110321, |
|
"grad_norm": 3.761871243885461, |
|
"learning_rate": 2.5167459355635524e-07, |
|
"loss": 0.0786, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.798932384341637, |
|
"grad_norm": 4.104335783895301, |
|
"learning_rate": 2.473152220058039e-07, |
|
"loss": 0.113, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 1.800711743772242, |
|
"grad_norm": 4.088786646044284, |
|
"learning_rate": 2.429929788224722e-07, |
|
"loss": 0.0813, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 1.802491103202847, |
|
"grad_norm": 3.6213223407428075, |
|
"learning_rate": 2.38707897772083e-07, |
|
"loss": 0.0923, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 1.8042704626334518, |
|
"grad_norm": 3.2952422189482915, |
|
"learning_rate": 2.3446001233004333e-07, |
|
"loss": 0.0996, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 1.806049822064057, |
|
"grad_norm": 3.271076247097943, |
|
"learning_rate": 2.3024935568118745e-07, |
|
"loss": 0.0704, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 1.8078291814946619, |
|
"grad_norm": 2.9142948006611333, |
|
"learning_rate": 2.2607596071951288e-07, |
|
"loss": 0.0581, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 1.809608540925267, |
|
"grad_norm": 2.569676617362709, |
|
"learning_rate": 2.2193986004792667e-07, |
|
"loss": 0.0763, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 1.811387900355872, |
|
"grad_norm": 2.923033519692625, |
|
"learning_rate": 2.1784108597799058e-07, |
|
"loss": 0.0686, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 1.8131672597864767, |
|
"grad_norm": 2.9120446368510535, |
|
"learning_rate": 2.1377967052966685e-07, |
|
"loss": 0.0532, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 1.814946619217082, |
|
"grad_norm": 3.3755575394996225, |
|
"learning_rate": 2.0975564543107007e-07, |
|
"loss": 0.0685, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.8167259786476868, |
|
"grad_norm": 3.858234118660017, |
|
"learning_rate": 2.057690421182168e-07, |
|
"loss": 0.0621, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 1.8185053380782918, |
|
"grad_norm": 3.1433568644353476, |
|
"learning_rate": 2.01819891734783e-07, |
|
"loss": 0.0757, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 1.8202846975088969, |
|
"grad_norm": 3.8098613479069123, |
|
"learning_rate": 1.979082251318576e-07, |
|
"loss": 0.0777, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 1.8220640569395017, |
|
"grad_norm": 3.103919390063623, |
|
"learning_rate": 1.9403407286770592e-07, |
|
"loss": 0.0766, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 1.8238434163701067, |
|
"grad_norm": 3.0551941596268044, |
|
"learning_rate": 1.9019746520752502e-07, |
|
"loss": 0.1103, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 1.8256227758007118, |
|
"grad_norm": 3.3887341268856415, |
|
"learning_rate": 1.8639843212321206e-07, |
|
"loss": 0.0637, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 1.8274021352313166, |
|
"grad_norm": 2.939505463490007, |
|
"learning_rate": 1.826370032931285e-07, |
|
"loss": 0.0771, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 1.8291814946619218, |
|
"grad_norm": 3.6418450011448993, |
|
"learning_rate": 1.789132081018674e-07, |
|
"loss": 0.0716, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 1.8309608540925266, |
|
"grad_norm": 3.5426533292714124, |
|
"learning_rate": 1.7522707564002706e-07, |
|
"loss": 0.1003, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 1.8327402135231317, |
|
"grad_norm": 3.0644343670497123, |
|
"learning_rate": 1.7157863470397718e-07, |
|
"loss": 0.0682, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.8345195729537367, |
|
"grad_norm": 3.705864735666452, |
|
"learning_rate": 1.6796791379564138e-07, |
|
"loss": 0.0967, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 1.8362989323843415, |
|
"grad_norm": 3.2402598740887134, |
|
"learning_rate": 1.6439494112227173e-07, |
|
"loss": 0.0585, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 1.8380782918149468, |
|
"grad_norm": 3.0922066890166597, |
|
"learning_rate": 1.6085974459622567e-07, |
|
"loss": 0.0678, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 1.8398576512455516, |
|
"grad_norm": 2.8653307769324785, |
|
"learning_rate": 1.573623518347517e-07, |
|
"loss": 0.0554, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 1.8416370106761566, |
|
"grad_norm": 3.1802179639672046, |
|
"learning_rate": 1.5390279015977117e-07, |
|
"loss": 0.0908, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 1.8434163701067616, |
|
"grad_norm": 3.4904508386718285, |
|
"learning_rate": 1.5048108659766693e-07, |
|
"loss": 0.0915, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 1.8451957295373664, |
|
"grad_norm": 3.240637381478209, |
|
"learning_rate": 1.470972678790711e-07, |
|
"loss": 0.1006, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 1.8469750889679717, |
|
"grad_norm": 2.699894093174582, |
|
"learning_rate": 1.437513604386559e-07, |
|
"loss": 0.0711, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 1.8487544483985765, |
|
"grad_norm": 3.9924495960840014, |
|
"learning_rate": 1.404433904149266e-07, |
|
"loss": 0.0847, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 1.8505338078291815, |
|
"grad_norm": 3.857337796531928, |
|
"learning_rate": 1.3717338365001943e-07, |
|
"loss": 0.0784, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.8523131672597866, |
|
"grad_norm": 4.203838986095957, |
|
"learning_rate": 1.3394136568949834e-07, |
|
"loss": 0.1225, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 1.8540925266903914, |
|
"grad_norm": 4.846827476461272, |
|
"learning_rate": 1.307473617821553e-07, |
|
"loss": 0.0782, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 1.8558718861209964, |
|
"grad_norm": 3.5932826371006, |
|
"learning_rate": 1.275913968798137e-07, |
|
"loss": 0.0694, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 1.8576512455516014, |
|
"grad_norm": 2.9873702356304106, |
|
"learning_rate": 1.2447349563713186e-07, |
|
"loss": 0.0832, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 1.8594306049822062, |
|
"grad_norm": 3.173919516804279, |
|
"learning_rate": 1.213936824114137e-07, |
|
"loss": 0.0737, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 1.8612099644128115, |
|
"grad_norm": 4.76498695025875, |
|
"learning_rate": 1.1835198126241509e-07, |
|
"loss": 0.0766, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 1.8629893238434163, |
|
"grad_norm": 4.387855817822402, |
|
"learning_rate": 1.1534841595215617e-07, |
|
"loss": 0.089, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 1.8647686832740213, |
|
"grad_norm": 3.034624201275938, |
|
"learning_rate": 1.1238300994473983e-07, |
|
"loss": 0.06, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 1.8665480427046264, |
|
"grad_norm": 3.4506882500893092, |
|
"learning_rate": 1.0945578640616183e-07, |
|
"loss": 0.119, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 1.8683274021352312, |
|
"grad_norm": 3.293030804645425, |
|
"learning_rate": 1.0656676820413603e-07, |
|
"loss": 0.0487, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.8701067615658364, |
|
"grad_norm": 2.9702719767450816, |
|
"learning_rate": 1.0371597790791166e-07, |
|
"loss": 0.0818, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 1.8718861209964412, |
|
"grad_norm": 4.17100919566967, |
|
"learning_rate": 1.0090343778809908e-07, |
|
"loss": 0.1101, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 1.8736654804270463, |
|
"grad_norm": 3.10584247755303, |
|
"learning_rate": 9.812916981649433e-08, |
|
"loss": 0.0618, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 1.8754448398576513, |
|
"grad_norm": 3.115548130035552, |
|
"learning_rate": 9.539319566590766e-08, |
|
"loss": 0.0901, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 1.8772241992882561, |
|
"grad_norm": 2.807944606934731, |
|
"learning_rate": 9.269553670999743e-08, |
|
"loss": 0.0685, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 1.8790035587188612, |
|
"grad_norm": 4.739520225502669, |
|
"learning_rate": 9.003621402309815e-08, |
|
"loss": 0.1016, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 1.8807829181494662, |
|
"grad_norm": 4.125629963850313, |
|
"learning_rate": 8.741524838005888e-08, |
|
"loss": 0.089, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 1.8825622775800712, |
|
"grad_norm": 3.3930882411230354, |
|
"learning_rate": 8.483266025608061e-08, |
|
"loss": 0.0681, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 1.8843416370106763, |
|
"grad_norm": 3.033308508527131, |
|
"learning_rate": 8.228846982655525e-08, |
|
"loss": 0.0718, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 1.886120996441281, |
|
"grad_norm": 3.473695307060586, |
|
"learning_rate": 7.978269696691021e-08, |
|
"loss": 0.1233, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.887900355871886, |
|
"grad_norm": 3.2145179033374998, |
|
"learning_rate": 7.731536125244965e-08, |
|
"loss": 0.066, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 1.8896797153024911, |
|
"grad_norm": 3.8404168106610874, |
|
"learning_rate": 7.488648195820513e-08, |
|
"loss": 0.1155, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 1.891459074733096, |
|
"grad_norm": 3.488209813896619, |
|
"learning_rate": 7.249607805878245e-08, |
|
"loss": 0.0741, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 1.8932384341637012, |
|
"grad_norm": 4.3583380191771335, |
|
"learning_rate": 7.014416822821557e-08, |
|
"loss": 0.0908, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 1.895017793594306, |
|
"grad_norm": 2.9361057932169023, |
|
"learning_rate": 6.783077083981793e-08, |
|
"loss": 0.0505, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 1.896797153024911, |
|
"grad_norm": 3.138951783993612, |
|
"learning_rate": 6.55559039660425e-08, |
|
"loss": 0.0832, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 1.898576512455516, |
|
"grad_norm": 3.7783132974157145, |
|
"learning_rate": 6.331958537833693e-08, |
|
"loss": 0.0766, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 1.9003558718861209, |
|
"grad_norm": 3.547174349345503, |
|
"learning_rate": 6.112183254700866e-08, |
|
"loss": 0.0696, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 1.9021352313167261, |
|
"grad_norm": 3.141430732114121, |
|
"learning_rate": 5.8962662641083856e-08, |
|
"loss": 0.059, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 1.903914590747331, |
|
"grad_norm": 3.7453234644568387, |
|
"learning_rate": 5.6842092528176516e-08, |
|
"loss": 0.0797, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.905693950177936, |
|
"grad_norm": 3.329392272104426, |
|
"learning_rate": 5.476013877435626e-08, |
|
"loss": 0.0792, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 1.907473309608541, |
|
"grad_norm": 3.4534953514149715, |
|
"learning_rate": 5.271681764401848e-08, |
|
"loss": 0.1141, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 1.9092526690391458, |
|
"grad_norm": 2.9269083227105686, |
|
"learning_rate": 5.071214509975775e-08, |
|
"loss": 0.0666, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 1.9110320284697508, |
|
"grad_norm": 3.890807385592486, |
|
"learning_rate": 4.8746136802240716e-08, |
|
"loss": 0.083, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 1.9128113879003559, |
|
"grad_norm": 4.225583150268323, |
|
"learning_rate": 4.6818808110087875e-08, |
|
"loss": 0.0963, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 1.914590747330961, |
|
"grad_norm": 3.9780990238436367, |
|
"learning_rate": 4.493017407975087e-08, |
|
"loss": 0.0974, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 1.916370106761566, |
|
"grad_norm": 3.3934885928836853, |
|
"learning_rate": 4.308024946539424e-08, |
|
"loss": 0.0593, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 1.9181494661921707, |
|
"grad_norm": 3.952171508564822, |
|
"learning_rate": 4.1269048718783344e-08, |
|
"loss": 0.0731, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 1.9199288256227758, |
|
"grad_norm": 2.890819080297749, |
|
"learning_rate": 3.9496585989167726e-08, |
|
"loss": 0.0545, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 1.9217081850533808, |
|
"grad_norm": 3.945973248390364, |
|
"learning_rate": 3.776287512317345e-08, |
|
"loss": 0.0924, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.9234875444839856, |
|
"grad_norm": 3.2808236376519724, |
|
"learning_rate": 3.606792966469375e-08, |
|
"loss": 0.0641, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 1.9252669039145909, |
|
"grad_norm": 3.0709988233558665, |
|
"learning_rate": 3.4411762854782426e-08, |
|
"loss": 0.0683, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 1.9270462633451957, |
|
"grad_norm": 3.2670661992605043, |
|
"learning_rate": 3.279438763155174e-08, |
|
"loss": 0.0641, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 1.9288256227758007, |
|
"grad_norm": 3.385168608507766, |
|
"learning_rate": 3.121581663007134e-08, |
|
"loss": 0.08, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 1.9306049822064058, |
|
"grad_norm": 2.6598297447432993, |
|
"learning_rate": 2.967606218226837e-08, |
|
"loss": 0.0847, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 1.9323843416370106, |
|
"grad_norm": 3.267764003593947, |
|
"learning_rate": 2.8175136316832e-08, |
|
"loss": 0.0695, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 1.9341637010676158, |
|
"grad_norm": 3.774346146023106, |
|
"learning_rate": 2.6713050759120117e-08, |
|
"loss": 0.0906, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 1.9359430604982206, |
|
"grad_norm": 3.073299712868894, |
|
"learning_rate": 2.528981693106558e-08, |
|
"loss": 0.0804, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 1.9377224199288257, |
|
"grad_norm": 3.323357855702747, |
|
"learning_rate": 2.3905445951089013e-08, |
|
"loss": 0.0971, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 1.9395017793594307, |
|
"grad_norm": 3.3705712446511957, |
|
"learning_rate": 2.2559948634011673e-08, |
|
"loss": 0.0667, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.9412811387900355, |
|
"grad_norm": 3.7218168539455405, |
|
"learning_rate": 2.125333549096942e-08, |
|
"loss": 0.0811, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 1.9430604982206405, |
|
"grad_norm": 3.686116155563527, |
|
"learning_rate": 1.9985616729332747e-08, |
|
"loss": 0.0925, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 1.9448398576512456, |
|
"grad_norm": 3.559486999825631, |
|
"learning_rate": 1.8756802252625773e-08, |
|
"loss": 0.0769, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 1.9466192170818504, |
|
"grad_norm": 4.020036864709511, |
|
"learning_rate": 1.75669016604485e-08, |
|
"loss": 0.0749, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 1.9483985765124556, |
|
"grad_norm": 4.487724407409945, |
|
"learning_rate": 1.6415924248403547e-08, |
|
"loss": 0.0541, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 1.9501779359430604, |
|
"grad_norm": 2.890105075182308, |
|
"learning_rate": 1.5303879008021773e-08, |
|
"loss": 0.0784, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 1.9519572953736655, |
|
"grad_norm": 3.481758677313581, |
|
"learning_rate": 1.4230774626691756e-08, |
|
"loss": 0.0802, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 1.9537366548042705, |
|
"grad_norm": 3.1832233582935823, |
|
"learning_rate": 1.3196619487594875e-08, |
|
"loss": 0.0784, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 1.9555160142348753, |
|
"grad_norm": 3.831777691571096, |
|
"learning_rate": 1.2201421669636448e-08, |
|
"loss": 0.0967, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 1.9572953736654806, |
|
"grad_norm": 3.3055143154626814, |
|
"learning_rate": 1.1245188947384133e-08, |
|
"loss": 0.0785, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.9590747330960854, |
|
"grad_norm": 2.790864706231256, |
|
"learning_rate": 1.0327928791006858e-08, |
|
"loss": 0.0679, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 1.9608540925266904, |
|
"grad_norm": 3.251485140701101, |
|
"learning_rate": 9.449648366217645e-09, |
|
"loss": 0.085, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 1.9626334519572954, |
|
"grad_norm": 3.0486025471067086, |
|
"learning_rate": 8.61035453421588e-09, |
|
"loss": 0.0646, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 1.9644128113879002, |
|
"grad_norm": 2.763251074218536, |
|
"learning_rate": 7.81005385163458e-09, |
|
"loss": 0.0687, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 1.9661921708185055, |
|
"grad_norm": 2.940337677365552, |
|
"learning_rate": 7.048752570488205e-09, |
|
"loss": 0.0907, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 1.9679715302491103, |
|
"grad_norm": 4.160470259682666, |
|
"learning_rate": 6.326456638125478e-09, |
|
"loss": 0.1445, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 1.9697508896797153, |
|
"grad_norm": 3.1739813212293675, |
|
"learning_rate": 5.643171697183314e-09, |
|
"loss": 0.0661, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 1.9715302491103204, |
|
"grad_norm": 4.330499852611146, |
|
"learning_rate": 4.998903085539075e-09, |
|
"loss": 0.0725, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 1.9733096085409252, |
|
"grad_norm": 2.5985197587064883, |
|
"learning_rate": 4.393655836272825e-09, |
|
"loss": 0.0552, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 1.9750889679715302, |
|
"grad_norm": 3.979357206420399, |
|
"learning_rate": 3.8274346776262514e-09, |
|
"loss": 0.0766, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.9768683274021353, |
|
"grad_norm": 3.1360641891756917, |
|
"learning_rate": 3.300244032966582e-09, |
|
"loss": 0.0672, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 1.97864768683274, |
|
"grad_norm": 4.096117076658105, |
|
"learning_rate": 2.8120880207493928e-09, |
|
"loss": 0.0726, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 1.9804270462633453, |
|
"grad_norm": 3.45913138053367, |
|
"learning_rate": 2.362970454491409e-09, |
|
"loss": 0.0794, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 1.9822064056939501, |
|
"grad_norm": 3.67524077370303, |
|
"learning_rate": 1.952894842735531e-09, |
|
"loss": 0.063, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 1.9839857651245552, |
|
"grad_norm": 2.182518486474042, |
|
"learning_rate": 1.5818643890258555e-09, |
|
"loss": 0.056, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 1.9857651245551602, |
|
"grad_norm": 4.021664462700635, |
|
"learning_rate": 1.2498819918843609e-09, |
|
"loss": 0.0785, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 1.987544483985765, |
|
"grad_norm": 4.38100556373104, |
|
"learning_rate": 9.569502447837053e-10, |
|
"loss": 0.0954, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 1.9893238434163703, |
|
"grad_norm": 3.1957902787098287, |
|
"learning_rate": 7.03071436131686e-10, |
|
"loss": 0.0871, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 1.991103202846975, |
|
"grad_norm": 2.5899242158994014, |
|
"learning_rate": 4.882475492506977e-10, |
|
"loss": 0.0595, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 1.99288256227758, |
|
"grad_norm": 4.018316373710333, |
|
"learning_rate": 3.124802623627465e-10, |
|
"loss": 0.0993, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.9946619217081851, |
|
"grad_norm": 3.3622748995615415, |
|
"learning_rate": 1.7577094857557097e-10, |
|
"loss": 0.1234, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 1.99644128113879, |
|
"grad_norm": 4.207267537120296, |
|
"learning_rate": 7.812067587487093e-11, |
|
"loss": 0.1114, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 1.998220640569395, |
|
"grad_norm": 3.649420513517773, |
|
"learning_rate": 1.9530207111539967e-11, |
|
"loss": 0.0849, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 2.905980024206405, |
|
"learning_rate": 0.0, |
|
"loss": 0.0757, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"step": 1124, |
|
"total_flos": 2176014237696.0, |
|
"train_loss": 0.14432900387700978, |
|
"train_runtime": 687.035, |
|
"train_samples_per_second": 13.079, |
|
"train_steps_per_second": 1.636 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1124, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 2000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2176014237696.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|