{ |
|
"best_metric": 0.15221890807151794, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-200", |
|
"epoch": 0.14711290915777858, |
|
"eval_steps": 50, |
|
"global_step": 200, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.000735564545788893, |
|
"grad_norm": 1.3955460786819458, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 0.8061, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.000735564545788893, |
|
"eval_loss": 1.0986895561218262, |
|
"eval_runtime": 191.9666, |
|
"eval_samples_per_second": 11.929, |
|
"eval_steps_per_second": 5.965, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.001471129091577786, |
|
"grad_norm": 1.407787799835205, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 0.8431, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.002206693637366679, |
|
"grad_norm": 1.375799536705017, |
|
"learning_rate": 1e-05, |
|
"loss": 0.8597, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.002942258183155572, |
|
"grad_norm": 1.491884469985962, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 0.8988, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.003677822728944465, |
|
"grad_norm": 1.3589227199554443, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 0.853, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.004413387274733358, |
|
"grad_norm": 1.2300374507904053, |
|
"learning_rate": 2e-05, |
|
"loss": 0.8203, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0051489518205222505, |
|
"grad_norm": 1.02793550491333, |
|
"learning_rate": 2.3333333333333336e-05, |
|
"loss": 0.7321, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.005884516366311144, |
|
"grad_norm": 0.952317476272583, |
|
"learning_rate": 2.6666666666666667e-05, |
|
"loss": 0.7058, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.0066200809121000365, |
|
"grad_norm": 0.8238752484321594, |
|
"learning_rate": 3e-05, |
|
"loss": 0.6184, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.00735564545788893, |
|
"grad_norm": 0.9686686992645264, |
|
"learning_rate": 3.3333333333333335e-05, |
|
"loss": 0.5712, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.008091210003677823, |
|
"grad_norm": 0.9226304888725281, |
|
"learning_rate": 3.6666666666666666e-05, |
|
"loss": 0.4854, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.008826774549466716, |
|
"grad_norm": 0.8993305563926697, |
|
"learning_rate": 4e-05, |
|
"loss": 0.4479, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.009562339095255609, |
|
"grad_norm": 0.8207470178604126, |
|
"learning_rate": 4.3333333333333334e-05, |
|
"loss": 0.3879, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.010297903641044501, |
|
"grad_norm": 0.7917346954345703, |
|
"learning_rate": 4.666666666666667e-05, |
|
"loss": 0.3442, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.011033468186833395, |
|
"grad_norm": 0.6463001370429993, |
|
"learning_rate": 5e-05, |
|
"loss": 0.2924, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.011769032732622288, |
|
"grad_norm": 0.524644672870636, |
|
"learning_rate": 5.333333333333333e-05, |
|
"loss": 0.2662, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.01250459727841118, |
|
"grad_norm": 0.5327229499816895, |
|
"learning_rate": 5.666666666666667e-05, |
|
"loss": 0.2513, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.013240161824200073, |
|
"grad_norm": 0.6008023619651794, |
|
"learning_rate": 6e-05, |
|
"loss": 0.2499, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.013975726369988967, |
|
"grad_norm": 0.4902108311653137, |
|
"learning_rate": 6.333333333333333e-05, |
|
"loss": 0.2138, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.01471129091577786, |
|
"grad_norm": 0.5153264999389648, |
|
"learning_rate": 6.666666666666667e-05, |
|
"loss": 0.2068, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.015446855461566752, |
|
"grad_norm": 0.5366236567497253, |
|
"learning_rate": 7e-05, |
|
"loss": 0.197, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.016182420007355647, |
|
"grad_norm": 0.407263845205307, |
|
"learning_rate": 7.333333333333333e-05, |
|
"loss": 0.1966, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.016917984553144538, |
|
"grad_norm": 0.36304229497909546, |
|
"learning_rate": 7.666666666666667e-05, |
|
"loss": 0.2016, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.017653549098933432, |
|
"grad_norm": 0.3669580817222595, |
|
"learning_rate": 8e-05, |
|
"loss": 0.2059, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.018389113644722323, |
|
"grad_norm": 0.22362849116325378, |
|
"learning_rate": 8.333333333333334e-05, |
|
"loss": 0.1527, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.019124678190511217, |
|
"grad_norm": 0.21722978353500366, |
|
"learning_rate": 8.666666666666667e-05, |
|
"loss": 0.1503, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.01986024273630011, |
|
"grad_norm": 0.2847565710544586, |
|
"learning_rate": 9e-05, |
|
"loss": 0.1685, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.020595807282089002, |
|
"grad_norm": 0.2842387557029724, |
|
"learning_rate": 9.333333333333334e-05, |
|
"loss": 0.1553, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.021331371827877896, |
|
"grad_norm": 0.25112128257751465, |
|
"learning_rate": 9.666666666666667e-05, |
|
"loss": 0.151, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.02206693637366679, |
|
"grad_norm": 0.2505984306335449, |
|
"learning_rate": 0.0001, |
|
"loss": 0.1433, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.02280250091945568, |
|
"grad_norm": 0.245281383395195, |
|
"learning_rate": 9.999146252290264e-05, |
|
"loss": 0.1618, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.023538065465244576, |
|
"grad_norm": 0.31131860613822937, |
|
"learning_rate": 9.996585300715116e-05, |
|
"loss": 0.1587, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.024273630011033467, |
|
"grad_norm": 0.2021295130252838, |
|
"learning_rate": 9.99231801983717e-05, |
|
"loss": 0.1654, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.02500919455682236, |
|
"grad_norm": 0.23759086430072784, |
|
"learning_rate": 9.986345866928941e-05, |
|
"loss": 0.1569, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.025744759102611255, |
|
"grad_norm": 0.23068475723266602, |
|
"learning_rate": 9.978670881475172e-05, |
|
"loss": 0.1664, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.026480323648400146, |
|
"grad_norm": 0.23946265876293182, |
|
"learning_rate": 9.96929568447637e-05, |
|
"loss": 0.1418, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.02721588819418904, |
|
"grad_norm": 0.16092844307422638, |
|
"learning_rate": 9.958223477553714e-05, |
|
"loss": 0.1519, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.027951452739977935, |
|
"grad_norm": 0.20561547577381134, |
|
"learning_rate": 9.94545804185573e-05, |
|
"loss": 0.1334, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.028687017285766826, |
|
"grad_norm": 0.24364995956420898, |
|
"learning_rate": 9.931003736767013e-05, |
|
"loss": 0.158, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.02942258183155572, |
|
"grad_norm": 0.2280334234237671, |
|
"learning_rate": 9.91486549841951e-05, |
|
"loss": 0.1429, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.03015814637734461, |
|
"grad_norm": 0.25129419565200806, |
|
"learning_rate": 9.89704883800683e-05, |
|
"loss": 0.1689, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.030893710923133505, |
|
"grad_norm": 0.15832379460334778, |
|
"learning_rate": 9.877559839902184e-05, |
|
"loss": 0.1559, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.0316292754689224, |
|
"grad_norm": 0.26977041363716125, |
|
"learning_rate": 9.85640515958057e-05, |
|
"loss": 0.1665, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.032364840014711294, |
|
"grad_norm": 0.18031959235668182, |
|
"learning_rate": 9.833592021345937e-05, |
|
"loss": 0.1779, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.03310040456050018, |
|
"grad_norm": 0.22443000972270966, |
|
"learning_rate": 9.809128215864097e-05, |
|
"loss": 0.1847, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.033835969106289075, |
|
"grad_norm": 0.2750892639160156, |
|
"learning_rate": 9.783022097502204e-05, |
|
"loss": 0.1664, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.03457153365207797, |
|
"grad_norm": 0.2007928043603897, |
|
"learning_rate": 9.755282581475769e-05, |
|
"loss": 0.1082, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.035307098197866864, |
|
"grad_norm": 0.20176196098327637, |
|
"learning_rate": 9.725919140804099e-05, |
|
"loss": 0.126, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.03604266274365576, |
|
"grad_norm": 0.19546356797218323, |
|
"learning_rate": 9.694941803075283e-05, |
|
"loss": 0.1979, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.036778227289444645, |
|
"grad_norm": 0.4081459641456604, |
|
"learning_rate": 9.662361147021779e-05, |
|
"loss": 0.1876, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.036778227289444645, |
|
"eval_loss": 0.19655592739582062, |
|
"eval_runtime": 192.9534, |
|
"eval_samples_per_second": 11.868, |
|
"eval_steps_per_second": 5.934, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.03751379183523354, |
|
"grad_norm": 0.9154587388038635, |
|
"learning_rate": 9.628188298907782e-05, |
|
"loss": 0.1907, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.038249356381022434, |
|
"grad_norm": 0.3832627832889557, |
|
"learning_rate": 9.592434928729616e-05, |
|
"loss": 0.1698, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.03898492092681133, |
|
"grad_norm": 0.344266414642334, |
|
"learning_rate": 9.555113246230442e-05, |
|
"loss": 0.1614, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.03972048547260022, |
|
"grad_norm": 0.3299565017223358, |
|
"learning_rate": 9.516235996730645e-05, |
|
"loss": 0.1651, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.04045605001838912, |
|
"grad_norm": 0.2757066488265991, |
|
"learning_rate": 9.475816456775313e-05, |
|
"loss": 0.1715, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.041191614564178004, |
|
"grad_norm": 0.26629284024238586, |
|
"learning_rate": 9.43386842960031e-05, |
|
"loss": 0.1632, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.0419271791099669, |
|
"grad_norm": 0.2985883057117462, |
|
"learning_rate": 9.39040624041849e-05, |
|
"loss": 0.177, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.04266274365575579, |
|
"grad_norm": 0.1407419890165329, |
|
"learning_rate": 9.345444731527642e-05, |
|
"loss": 0.1631, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.04339830820154469, |
|
"grad_norm": 0.16616809368133545, |
|
"learning_rate": 9.298999257241863e-05, |
|
"loss": 0.1715, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.04413387274733358, |
|
"grad_norm": 0.18962794542312622, |
|
"learning_rate": 9.251085678648072e-05, |
|
"loss": 0.181, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04486943729312247, |
|
"grad_norm": 0.17522728443145752, |
|
"learning_rate": 9.201720358189464e-05, |
|
"loss": 0.1759, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.04560500183891136, |
|
"grad_norm": 0.14696826040744781, |
|
"learning_rate": 9.150920154077754e-05, |
|
"loss": 0.1894, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.04634056638470026, |
|
"grad_norm": 0.13528281450271606, |
|
"learning_rate": 9.098702414536107e-05, |
|
"loss": 0.1743, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.04707613093048915, |
|
"grad_norm": 0.13108627498149872, |
|
"learning_rate": 9.045084971874738e-05, |
|
"loss": 0.1666, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.047811695476278046, |
|
"grad_norm": 0.15503785014152527, |
|
"learning_rate": 8.9900861364012e-05, |
|
"loss": 0.1721, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.04854726002206693, |
|
"grad_norm": 0.13304218649864197, |
|
"learning_rate": 8.933724690167417e-05, |
|
"loss": 0.158, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.04928282456785583, |
|
"grad_norm": 0.1355734020471573, |
|
"learning_rate": 8.876019880555649e-05, |
|
"loss": 0.1752, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.05001838911364472, |
|
"grad_norm": 0.1821872442960739, |
|
"learning_rate": 8.816991413705516e-05, |
|
"loss": 0.1667, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.050753953659433616, |
|
"grad_norm": 0.12791278958320618, |
|
"learning_rate": 8.756659447784368e-05, |
|
"loss": 0.1646, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.05148951820522251, |
|
"grad_norm": 0.12836161255836487, |
|
"learning_rate": 8.695044586103296e-05, |
|
"loss": 0.165, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.0522250827510114, |
|
"grad_norm": 0.11118108779191971, |
|
"learning_rate": 8.632167870081121e-05, |
|
"loss": 0.1767, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.05296064729680029, |
|
"grad_norm": 0.18004432320594788, |
|
"learning_rate": 8.568050772058762e-05, |
|
"loss": 0.1536, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.05369621184258919, |
|
"grad_norm": 0.16035598516464233, |
|
"learning_rate": 8.502715187966455e-05, |
|
"loss": 0.1501, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.05443177638837808, |
|
"grad_norm": 0.11390311270952225, |
|
"learning_rate": 8.436183429846313e-05, |
|
"loss": 0.1473, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.055167340934166975, |
|
"grad_norm": 0.11487963795661926, |
|
"learning_rate": 8.368478218232787e-05, |
|
"loss": 0.1371, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.05590290547995587, |
|
"grad_norm": 0.1358136385679245, |
|
"learning_rate": 8.299622674393614e-05, |
|
"loss": 0.1354, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.05663847002574476, |
|
"grad_norm": 0.11609425395727158, |
|
"learning_rate": 8.229640312433937e-05, |
|
"loss": 0.1339, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.05737403457153365, |
|
"grad_norm": 0.10956840217113495, |
|
"learning_rate": 8.158555031266254e-05, |
|
"loss": 0.1329, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.058109599117322545, |
|
"grad_norm": 0.15083730220794678, |
|
"learning_rate": 8.086391106448965e-05, |
|
"loss": 0.1221, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.05884516366311144, |
|
"grad_norm": 0.13061225414276123, |
|
"learning_rate": 8.013173181896283e-05, |
|
"loss": 0.1356, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.059580728208900334, |
|
"grad_norm": 0.09824299067258835, |
|
"learning_rate": 7.938926261462366e-05, |
|
"loss": 0.1425, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.06031629275468922, |
|
"grad_norm": 0.23265309631824493, |
|
"learning_rate": 7.863675700402526e-05, |
|
"loss": 0.1478, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.061051857300478116, |
|
"grad_norm": 0.10225420445203781, |
|
"learning_rate": 7.787447196714427e-05, |
|
"loss": 0.1348, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.06178742184626701, |
|
"grad_norm": 0.14389212429523468, |
|
"learning_rate": 7.710266782362247e-05, |
|
"loss": 0.1449, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.0625229863920559, |
|
"grad_norm": 0.22097352147102356, |
|
"learning_rate": 7.63216081438678e-05, |
|
"loss": 0.1407, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.0632585509378448, |
|
"grad_norm": 0.13521602749824524, |
|
"learning_rate": 7.553155965904535e-05, |
|
"loss": 0.1591, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.06399411548363369, |
|
"grad_norm": 0.09170956909656525, |
|
"learning_rate": 7.473279216998895e-05, |
|
"loss": 0.1366, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.06472968002942259, |
|
"grad_norm": 0.09572591632604599, |
|
"learning_rate": 7.392557845506432e-05, |
|
"loss": 0.1557, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.06546524457521148, |
|
"grad_norm": 0.12399427592754364, |
|
"learning_rate": 7.311019417701566e-05, |
|
"loss": 0.1534, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.06620080912100036, |
|
"grad_norm": 0.09619427472352982, |
|
"learning_rate": 7.228691778882693e-05, |
|
"loss": 0.1561, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.06693637366678926, |
|
"grad_norm": 0.09777440130710602, |
|
"learning_rate": 7.145603043863045e-05, |
|
"loss": 0.1528, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.06767193821257815, |
|
"grad_norm": 0.14338025450706482, |
|
"learning_rate": 7.061781587369519e-05, |
|
"loss": 0.1577, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.06840750275836704, |
|
"grad_norm": 0.08410169184207916, |
|
"learning_rate": 6.977256034352712e-05, |
|
"loss": 0.1427, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.06914306730415594, |
|
"grad_norm": 0.09892772138118744, |
|
"learning_rate": 6.892055250211552e-05, |
|
"loss": 0.1435, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.06987863184994483, |
|
"grad_norm": 0.2183755338191986, |
|
"learning_rate": 6.806208330935766e-05, |
|
"loss": 0.1738, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.07061419639573373, |
|
"grad_norm": 0.14419087767601013, |
|
"learning_rate": 6.719744593169641e-05, |
|
"loss": 0.1714, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.07134976094152262, |
|
"grad_norm": 0.0980491042137146, |
|
"learning_rate": 6.632693564200416e-05, |
|
"loss": 0.0685, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.07208532548731152, |
|
"grad_norm": 0.08917222917079926, |
|
"learning_rate": 6.545084971874738e-05, |
|
"loss": 0.1289, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.07282089003310041, |
|
"grad_norm": 0.1148543581366539, |
|
"learning_rate": 6.456948734446624e-05, |
|
"loss": 0.1919, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.07355645457888929, |
|
"grad_norm": 0.1381547749042511, |
|
"learning_rate": 6.368314950360415e-05, |
|
"loss": 0.1883, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07355645457888929, |
|
"eval_loss": 0.1586042046546936, |
|
"eval_runtime": 193.0598, |
|
"eval_samples_per_second": 11.862, |
|
"eval_steps_per_second": 5.931, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07429201912467819, |
|
"grad_norm": 0.48300158977508545, |
|
"learning_rate": 6.279213887972179e-05, |
|
"loss": 0.1455, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.07502758367046708, |
|
"grad_norm": 0.13084329664707184, |
|
"learning_rate": 6.189675975213094e-05, |
|
"loss": 0.1361, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.07576314821625597, |
|
"grad_norm": 0.11135324835777283, |
|
"learning_rate": 6.099731789198344e-05, |
|
"loss": 0.1476, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.07649871276204487, |
|
"grad_norm": 0.17674945294857025, |
|
"learning_rate": 6.009412045785051e-05, |
|
"loss": 0.1504, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.07723427730783376, |
|
"grad_norm": 0.31758978962898254, |
|
"learning_rate": 5.918747589082853e-05, |
|
"loss": 0.1508, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.07796984185362266, |
|
"grad_norm": 0.14566396176815033, |
|
"learning_rate": 5.82776938092065e-05, |
|
"loss": 0.1425, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.07870540639941155, |
|
"grad_norm": 0.18799343705177307, |
|
"learning_rate": 5.736508490273188e-05, |
|
"loss": 0.1478, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.07944097094520045, |
|
"grad_norm": 0.14357222616672516, |
|
"learning_rate": 5.644996082651017e-05, |
|
"loss": 0.1617, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.08017653549098934, |
|
"grad_norm": 0.1759078949689865, |
|
"learning_rate": 5.553263409457504e-05, |
|
"loss": 0.1652, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.08091210003677823, |
|
"grad_norm": 0.13642680644989014, |
|
"learning_rate": 5.4613417973165106e-05, |
|
"loss": 0.158, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.08164766458256711, |
|
"grad_norm": 0.0958728939294815, |
|
"learning_rate": 5.3692626373743706e-05, |
|
"loss": 0.1736, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.08238322912835601, |
|
"grad_norm": 0.10906694084405899, |
|
"learning_rate": 5.27705737457985e-05, |
|
"loss": 0.1725, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.0831187936741449, |
|
"grad_norm": 0.09963790327310562, |
|
"learning_rate": 5.184757496945726e-05, |
|
"loss": 0.1853, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.0838543582199338, |
|
"grad_norm": 0.10227838158607483, |
|
"learning_rate": 5.092394524795649e-05, |
|
"loss": 0.1506, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.08458992276572269, |
|
"grad_norm": 0.11423751711845398, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1647, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.08532548731151159, |
|
"grad_norm": 0.1167915016412735, |
|
"learning_rate": 4.907605475204352e-05, |
|
"loss": 0.1701, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.08606105185730048, |
|
"grad_norm": 0.10179633647203445, |
|
"learning_rate": 4.8152425030542766e-05, |
|
"loss": 0.1784, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.08679661640308937, |
|
"grad_norm": 0.07918064296245575, |
|
"learning_rate": 4.72294262542015e-05, |
|
"loss": 0.1655, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.08753218094887827, |
|
"grad_norm": 0.09195617586374283, |
|
"learning_rate": 4.6307373626256306e-05, |
|
"loss": 0.1673, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.08826774549466716, |
|
"grad_norm": 0.08825042843818665, |
|
"learning_rate": 4.5386582026834906e-05, |
|
"loss": 0.1792, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.08900331004045604, |
|
"grad_norm": 0.10082146525382996, |
|
"learning_rate": 4.446736590542497e-05, |
|
"loss": 0.1701, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.08973887458624494, |
|
"grad_norm": 0.09268957376480103, |
|
"learning_rate": 4.3550039173489845e-05, |
|
"loss": 0.1681, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.09047443913203383, |
|
"grad_norm": 0.18448396027088165, |
|
"learning_rate": 4.2634915097268115e-05, |
|
"loss": 0.159, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.09121000367782273, |
|
"grad_norm": 0.08122457563877106, |
|
"learning_rate": 4.1722306190793495e-05, |
|
"loss": 0.152, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.09194556822361162, |
|
"grad_norm": 0.11715678125619888, |
|
"learning_rate": 4.0812524109171476e-05, |
|
"loss": 0.1563, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.09268113276940051, |
|
"grad_norm": 0.06476876884698868, |
|
"learning_rate": 3.99058795421495e-05, |
|
"loss": 0.1444, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.09341669731518941, |
|
"grad_norm": 0.08869259804487228, |
|
"learning_rate": 3.9002682108016585e-05, |
|
"loss": 0.141, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.0941522618609783, |
|
"grad_norm": 0.09998400509357452, |
|
"learning_rate": 3.8103240247869075e-05, |
|
"loss": 0.1466, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.0948878264067672, |
|
"grad_norm": 0.07661914825439453, |
|
"learning_rate": 3.720786112027822e-05, |
|
"loss": 0.1294, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.09562339095255609, |
|
"grad_norm": 0.07216975837945938, |
|
"learning_rate": 3.631685049639586e-05, |
|
"loss": 0.1293, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.09635895549834499, |
|
"grad_norm": 0.09876257926225662, |
|
"learning_rate": 3.543051265553377e-05, |
|
"loss": 0.153, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.09709452004413387, |
|
"grad_norm": 0.08432336151599884, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 0.1561, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.09783008458992276, |
|
"grad_norm": 0.15363752841949463, |
|
"learning_rate": 3.367306435799584e-05, |
|
"loss": 0.1451, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.09856564913571166, |
|
"grad_norm": 0.0780152752995491, |
|
"learning_rate": 3.2802554068303596e-05, |
|
"loss": 0.1562, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.09930121368150055, |
|
"grad_norm": 0.07892629504203796, |
|
"learning_rate": 3.1937916690642356e-05, |
|
"loss": 0.1575, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.10003677822728944, |
|
"grad_norm": 0.06880857050418854, |
|
"learning_rate": 3.107944749788449e-05, |
|
"loss": 0.1461, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.10077234277307834, |
|
"grad_norm": 0.11363841593265533, |
|
"learning_rate": 3.0227439656472877e-05, |
|
"loss": 0.163, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.10150790731886723, |
|
"grad_norm": 0.05595669150352478, |
|
"learning_rate": 2.9382184126304834e-05, |
|
"loss": 0.1535, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.10224347186465613, |
|
"grad_norm": 0.054943084716796875, |
|
"learning_rate": 2.8543969561369556e-05, |
|
"loss": 0.1232, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.10297903641044502, |
|
"grad_norm": 0.07336664199829102, |
|
"learning_rate": 2.771308221117309e-05, |
|
"loss": 0.1348, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.10371460095623392, |
|
"grad_norm": 0.044373560696840286, |
|
"learning_rate": 2.688980582298435e-05, |
|
"loss": 0.1183, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.1044501655020228, |
|
"grad_norm": 0.0573909729719162, |
|
"learning_rate": 2.607442154493568e-05, |
|
"loss": 0.1536, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.10518573004781169, |
|
"grad_norm": 0.06830648332834244, |
|
"learning_rate": 2.5267207830011068e-05, |
|
"loss": 0.1489, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.10592129459360058, |
|
"grad_norm": 0.06195629760622978, |
|
"learning_rate": 2.446844034095466e-05, |
|
"loss": 0.1512, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.10665685913938948, |
|
"grad_norm": 0.06268982589244843, |
|
"learning_rate": 2.3678391856132204e-05, |
|
"loss": 0.1738, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.10739242368517837, |
|
"grad_norm": 0.08042903244495392, |
|
"learning_rate": 2.2897332176377528e-05, |
|
"loss": 0.1464, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.10812798823096727, |
|
"grad_norm": 0.07065901905298233, |
|
"learning_rate": 2.2125528032855724e-05, |
|
"loss": 0.1503, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.10886355277675616, |
|
"grad_norm": 0.04282858222723007, |
|
"learning_rate": 2.136324299597474e-05, |
|
"loss": 0.113, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.10959911732254506, |
|
"grad_norm": 0.07070647180080414, |
|
"learning_rate": 2.061073738537635e-05, |
|
"loss": 0.1903, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.11033468186833395, |
|
"grad_norm": 0.2172415852546692, |
|
"learning_rate": 1.9868268181037185e-05, |
|
"loss": 0.1915, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.11033468186833395, |
|
"eval_loss": 0.1532428115606308, |
|
"eval_runtime": 192.9593, |
|
"eval_samples_per_second": 11.868, |
|
"eval_steps_per_second": 5.934, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.11107024641412284, |
|
"grad_norm": 0.12797152996063232, |
|
"learning_rate": 1.9136088935510362e-05, |
|
"loss": 0.1411, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.11180581095991174, |
|
"grad_norm": 0.1103443130850792, |
|
"learning_rate": 1.8414449687337464e-05, |
|
"loss": 0.1444, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.11254137550570062, |
|
"grad_norm": 0.08198443800210953, |
|
"learning_rate": 1.7703596875660645e-05, |
|
"loss": 0.1422, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.11327694005148951, |
|
"grad_norm": 0.10905614495277405, |
|
"learning_rate": 1.700377325606388e-05, |
|
"loss": 0.1533, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.11401250459727841, |
|
"grad_norm": 0.13996587693691254, |
|
"learning_rate": 1.631521781767214e-05, |
|
"loss": 0.1418, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.1147480691430673, |
|
"grad_norm": 0.07795163244009018, |
|
"learning_rate": 1.5638165701536868e-05, |
|
"loss": 0.1514, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.1154836336888562, |
|
"grad_norm": 0.07931917160749435, |
|
"learning_rate": 1.4972848120335453e-05, |
|
"loss": 0.1464, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.11621919823464509, |
|
"grad_norm": 0.0686430111527443, |
|
"learning_rate": 1.4319492279412388e-05, |
|
"loss": 0.1472, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.11695476278043399, |
|
"grad_norm": 0.083328977227211, |
|
"learning_rate": 1.3678321299188801e-05, |
|
"loss": 0.1532, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.11769032732622288, |
|
"grad_norm": 0.09695445746183395, |
|
"learning_rate": 1.3049554138967051e-05, |
|
"loss": 0.1645, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.11842589187201177, |
|
"grad_norm": 0.1066075786948204, |
|
"learning_rate": 1.2433405522156332e-05, |
|
"loss": 0.1596, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.11916145641780067, |
|
"grad_norm": 0.08189854025840759, |
|
"learning_rate": 1.183008586294485e-05, |
|
"loss": 0.1734, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.11989702096358955, |
|
"grad_norm": 0.06908301264047623, |
|
"learning_rate": 1.1239801194443506e-05, |
|
"loss": 0.1726, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.12063258550937844, |
|
"grad_norm": 0.08635183423757553, |
|
"learning_rate": 1.066275309832584e-05, |
|
"loss": 0.1892, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.12136815005516734, |
|
"grad_norm": 0.1024232879281044, |
|
"learning_rate": 1.0099138635988026e-05, |
|
"loss": 0.174, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.12210371460095623, |
|
"grad_norm": 0.09103770554065704, |
|
"learning_rate": 9.549150281252633e-06, |
|
"loss": 0.1606, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.12283927914674513, |
|
"grad_norm": 0.07270883768796921, |
|
"learning_rate": 9.012975854638949e-06, |
|
"loss": 0.1572, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.12357484369253402, |
|
"grad_norm": 0.0831652358174324, |
|
"learning_rate": 8.490798459222476e-06, |
|
"loss": 0.172, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.12431040823832291, |
|
"grad_norm": 0.11702089756727219, |
|
"learning_rate": 7.982796418105371e-06, |
|
"loss": 0.1667, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.1250459727841118, |
|
"grad_norm": 0.08802536129951477, |
|
"learning_rate": 7.489143213519301e-06, |
|
"loss": 0.1639, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.1257815373299007, |
|
"grad_norm": 0.08707013726234436, |
|
"learning_rate": 7.010007427581378e-06, |
|
"loss": 0.1817, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.1265171018756896, |
|
"grad_norm": 0.0758250504732132, |
|
"learning_rate": 6.5455526847235825e-06, |
|
"loss": 0.1584, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.12725266642147848, |
|
"grad_norm": 0.08350931107997894, |
|
"learning_rate": 6.0959375958151045e-06, |
|
"loss": 0.1696, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.12798823096726739, |
|
"grad_norm": 0.07942811399698257, |
|
"learning_rate": 5.6613157039969055e-06, |
|
"loss": 0.1441, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.12872379551305627, |
|
"grad_norm": 0.062362104654312134, |
|
"learning_rate": 5.241835432246889e-06, |
|
"loss": 0.1323, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.12945936005884517, |
|
"grad_norm": 0.07514508813619614, |
|
"learning_rate": 4.837640032693558e-06, |
|
"loss": 0.1618, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.13019492460463405, |
|
"grad_norm": 0.07202325016260147, |
|
"learning_rate": 4.448867537695578e-06, |
|
"loss": 0.1222, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.13093048915042296, |
|
"grad_norm": 0.0819399282336235, |
|
"learning_rate": 4.075650712703849e-06, |
|
"loss": 0.1395, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.13166605369621184, |
|
"grad_norm": 0.092143714427948, |
|
"learning_rate": 3.71811701092219e-06, |
|
"loss": 0.1463, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.13240161824200072, |
|
"grad_norm": 0.0711476281285286, |
|
"learning_rate": 3.376388529782215e-06, |
|
"loss": 0.1356, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.13313718278778963, |
|
"grad_norm": 0.06504477560520172, |
|
"learning_rate": 3.0505819692471792e-06, |
|
"loss": 0.1335, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.1338727473335785, |
|
"grad_norm": 0.07897264510393143, |
|
"learning_rate": 2.7408085919590264e-06, |
|
"loss": 0.1623, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.13460831187936742, |
|
"grad_norm": 0.08557404577732086, |
|
"learning_rate": 2.4471741852423237e-06, |
|
"loss": 0.1515, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.1353438764251563, |
|
"grad_norm": 0.06471476703882217, |
|
"learning_rate": 2.1697790249779636e-06, |
|
"loss": 0.134, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.1360794409709452, |
|
"grad_norm": 0.07636171579360962, |
|
"learning_rate": 1.908717841359048e-06, |
|
"loss": 0.1438, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.1368150055167341, |
|
"grad_norm": 0.05042177066206932, |
|
"learning_rate": 1.6640797865406288e-06, |
|
"loss": 0.1344, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.137550570062523, |
|
"grad_norm": 0.05630411207675934, |
|
"learning_rate": 1.4359484041943038e-06, |
|
"loss": 0.1498, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.13828613460831188, |
|
"grad_norm": 0.04595191031694412, |
|
"learning_rate": 1.2244016009781701e-06, |
|
"loss": 0.1455, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.13902169915410076, |
|
"grad_norm": 0.07169954478740692, |
|
"learning_rate": 1.0295116199317057e-06, |
|
"loss": 0.1542, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.13975726369988967, |
|
"grad_norm": 0.05570828542113304, |
|
"learning_rate": 8.513450158049108e-07, |
|
"loss": 0.1326, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.14049282824567855, |
|
"grad_norm": 0.09222900122404099, |
|
"learning_rate": 6.899626323298713e-07, |
|
"loss": 0.1656, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.14122839279146746, |
|
"grad_norm": 0.09059873968362808, |
|
"learning_rate": 5.454195814427021e-07, |
|
"loss": 0.1732, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.14196395733725634, |
|
"grad_norm": 0.11539854854345322, |
|
"learning_rate": 4.177652244628627e-07, |
|
"loss": 0.157, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.14269952188304524, |
|
"grad_norm": 0.08133802562952042, |
|
"learning_rate": 3.0704315523631953e-07, |
|
"loss": 0.1733, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.14343508642883412, |
|
"grad_norm": 0.0870978981256485, |
|
"learning_rate": 2.1329118524827662e-07, |
|
"loss": 0.1564, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.14417065097462303, |
|
"grad_norm": 0.07082478702068329, |
|
"learning_rate": 1.3654133071059893e-07, |
|
"loss": 0.1379, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.1449062155204119, |
|
"grad_norm": 0.11655426025390625, |
|
"learning_rate": 7.681980162830282e-08, |
|
"loss": 0.1102, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.14564178006620082, |
|
"grad_norm": 0.07025150209665298, |
|
"learning_rate": 3.4146992848854695e-08, |
|
"loss": 0.1203, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.1463773446119897, |
|
"grad_norm": 0.08077063411474228, |
|
"learning_rate": 8.537477097364522e-09, |
|
"loss": 0.1903, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.14711290915777858, |
|
"grad_norm": 0.10279268771409988, |
|
"learning_rate": 0.0, |
|
"loss": 0.194, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.14711290915777858, |
|
"eval_loss": 0.15221890807151794, |
|
"eval_runtime": 193.1181, |
|
"eval_samples_per_second": 11.858, |
|
"eval_steps_per_second": 5.929, |
|
"step": 200 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 200, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 3.0321122932791706e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |