{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "eval_steps": 500,
  "global_step": 226530,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.02, "learning_rate": 4.98896393413676e-05, "loss": 7.855, "step": 500 },
    { "epoch": 0.04, "learning_rate": 4.977927868273518e-05, "loss": 6.9826, "step": 1000 },
    { "epoch": 0.07, "learning_rate": 4.9668918024102774e-05, "loss": 6.6355, "step": 1500 },
    { "epoch": 0.09, "learning_rate": 4.955855736547036e-05, "loss": 6.3627, "step": 2000 },
    { "epoch": 0.11, "learning_rate": 4.944819670683795e-05, "loss": 6.2169, "step": 2500 },
    { "epoch": 0.13, "learning_rate": 4.933783604820554e-05, "loss": 6.0647, "step": 3000 },
    { "epoch": 0.15, "learning_rate": 4.922747538957313e-05, "loss": 5.9159, "step": 3500 },
    { "epoch": 0.18, "learning_rate": 4.9117114730940715e-05, "loss": 5.7711, "step": 4000 },
    { "epoch": 0.2, "learning_rate": 4.900675407230831e-05, "loss": 5.6904, "step": 4500 },
    { "epoch": 0.22, "learning_rate": 4.88963934136759e-05, "loss": 5.571, "step": 5000 },
    { "epoch": 0.24, "learning_rate": 4.8786032755043486e-05, "loss": 5.49, "step": 5500 },
    { "epoch": 0.26, "learning_rate": 4.8675672096411075e-05, "loss": 5.3809, "step": 6000 },
    { "epoch": 0.29, "learning_rate": 4.856531143777866e-05, "loss": 5.3067, "step": 6500 },
    { "epoch": 0.31, "learning_rate": 4.845495077914625e-05, "loss": 5.2368, "step": 7000 },
    { "epoch": 0.33, "learning_rate": 4.8344590120513846e-05, "loss": 5.1583, "step": 7500 },
    { "epoch": 0.35, "learning_rate": 4.823422946188143e-05, "loss": 5.1042, "step": 8000 },
    { "epoch": 0.38, "learning_rate": 4.812386880324902e-05, "loss": 5.0425, "step": 8500 },
    { "epoch": 0.4, "learning_rate": 4.801350814461661e-05, "loss": 5.0018, "step": 9000 },
    { "epoch": 0.42, "learning_rate": 4.79031474859842e-05, "loss": 4.9537, "step": 9500 },
    { "epoch": 0.44, "learning_rate": 4.779278682735179e-05, "loss": 4.9003, "step": 10000 },
    { "epoch": 0.46, "learning_rate": 4.768242616871938e-05, "loss": 4.8523, "step": 10500 },
    { "epoch": 0.49, "learning_rate": 4.7572065510086963e-05, "loss": 4.8387, "step": 11000 },
    { "epoch": 0.51, "learning_rate": 4.746170485145456e-05, "loss": 4.7748, "step": 11500 },
    { "epoch": 0.53, "learning_rate": 4.735134419282215e-05, "loss": 4.7472, "step": 12000 },
    { "epoch": 0.55, "learning_rate": 4.7240983534189735e-05, "loss": 4.722, "step": 12500 },
    { "epoch": 0.57, "learning_rate": 4.713062287555732e-05, "loss": 4.6713, "step": 13000 },
    { "epoch": 0.6, "learning_rate": 4.702026221692492e-05, "loss": 4.6363, "step": 13500 },
    { "epoch": 0.62, "learning_rate": 4.69099015582925e-05, "loss": 4.6174, "step": 14000 },
    { "epoch": 0.64, "learning_rate": 4.6799540899660094e-05, "loss": 4.593, "step": 14500 },
    { "epoch": 0.66, "learning_rate": 4.668918024102768e-05, "loss": 4.5752, "step": 15000 },
    { "epoch": 0.68, "learning_rate": 4.657881958239527e-05, "loss": 4.5604, "step": 15500 },
    { "epoch": 0.71, "learning_rate": 4.646845892376286e-05, "loss": 4.5126, "step": 16000 },
    { "epoch": 0.73, "learning_rate": 4.6358098265130454e-05, "loss": 4.4943, "step": 16500 },
    { "epoch": 0.75, "learning_rate": 4.6247737606498036e-05, "loss": 4.4695, "step": 17000 },
    { "epoch": 0.77, "learning_rate": 4.613737694786563e-05, "loss": 4.4434, "step": 17500 },
    { "epoch": 0.79, "learning_rate": 4.602701628923322e-05, "loss": 4.4365, "step": 18000 },
    { "epoch": 0.82, "learning_rate": 4.591665563060081e-05, "loss": 4.3896, "step": 18500 },
    { "epoch": 0.84, "learning_rate": 4.5806294971968395e-05, "loss": 4.3728, "step": 19000 },
    { "epoch": 0.86, "learning_rate": 4.569593431333598e-05, "loss": 4.348, "step": 19500 },
    { "epoch": 0.88, "learning_rate": 4.558557365470357e-05, "loss": 4.3362, "step": 20000 },
    { "epoch": 0.9, "learning_rate": 4.5475212996071167e-05, "loss": 4.2904, "step": 20500 },
    { "epoch": 0.93, "learning_rate": 4.536485233743875e-05, "loss": 4.2728, "step": 21000 },
    { "epoch": 0.95, "learning_rate": 4.525449167880634e-05, "loss": 4.2577, "step": 21500 },
    { "epoch": 0.97, "learning_rate": 4.514413102017393e-05, "loss": 4.2348, "step": 22000 },
    { "epoch": 0.99, "learning_rate": 4.503377036154152e-05, "loss": 4.2174, "step": 22500 },
    { "epoch": 1.02, "learning_rate": 4.492340970290911e-05, "loss": 4.1921, "step": 23000 },
    { "epoch": 1.04, "learning_rate": 4.48130490442767e-05, "loss": 4.1695, "step": 23500 },
    { "epoch": 1.06, "learning_rate": 4.4702688385644284e-05, "loss": 4.1917, "step": 24000 },
    { "epoch": 1.08, "learning_rate": 4.459232772701188e-05, "loss": 4.143, "step": 24500 },
    { "epoch": 1.1, "learning_rate": 4.448196706837947e-05, "loss": 4.1317, "step": 25000 },
    { "epoch": 1.13, "learning_rate": 4.4371606409747055e-05, "loss": 4.1043, "step": 25500 },
    { "epoch": 1.15, "learning_rate": 4.4261245751114644e-05, "loss": 4.0883, "step": 26000 },
    { "epoch": 1.17, "learning_rate": 4.415088509248224e-05, "loss": 4.0739, "step": 26500 },
    { "epoch": 1.19, "learning_rate": 4.404052443384982e-05, "loss": 4.0767, "step": 27000 },
    { "epoch": 1.21, "learning_rate": 4.3930163775217415e-05, "loss": 4.0529, "step": 27500 },
    { "epoch": 1.24, "learning_rate": 4.3819803116585e-05, "loss": 4.041, "step": 28000 },
    { "epoch": 1.26, "learning_rate": 4.370944245795259e-05, "loss": 4.0143, "step": 28500 },
    { "epoch": 1.28, "learning_rate": 4.359908179932018e-05, "loss": 3.9921, "step": 29000 },
    { "epoch": 1.3, "learning_rate": 4.3488721140687775e-05, "loss": 3.9872, "step": 29500 },
    { "epoch": 1.32, "learning_rate": 4.3378360482055356e-05, "loss": 3.9652, "step": 30000 },
    { "epoch": 1.35, "learning_rate": 4.326799982342295e-05, "loss": 3.9583, "step": 30500 },
    { "epoch": 1.37, "learning_rate": 4.315763916479054e-05, "loss": 3.9451, "step": 31000 },
    { "epoch": 1.39, "learning_rate": 4.304727850615813e-05, "loss": 3.9318, "step": 31500 },
    { "epoch": 1.41, "learning_rate": 4.2936917847525716e-05, "loss": 3.9023, "step": 32000 },
    { "epoch": 1.43, "learning_rate": 4.2826557188893304e-05, "loss": 3.8742, "step": 32500 },
    { "epoch": 1.46, "learning_rate": 4.271619653026089e-05, "loss": 3.8979, "step": 33000 },
    { "epoch": 1.48, "learning_rate": 4.260583587162849e-05, "loss": 3.8582, "step": 33500 },
    { "epoch": 1.5, "learning_rate": 4.2495475212996075e-05, "loss": 3.8363, "step": 34000 },
    { "epoch": 1.52, "learning_rate": 4.2385114554363664e-05, "loss": 3.826, "step": 34500 },
    { "epoch": 1.55, "learning_rate": 4.227475389573125e-05, "loss": 3.824, "step": 35000 },
    { "epoch": 1.57, "learning_rate": 4.216439323709884e-05, "loss": 3.7984, "step": 35500 },
    { "epoch": 1.59, "learning_rate": 4.205403257846643e-05, "loss": 3.7817, "step": 36000 },
    { "epoch": 1.61, "learning_rate": 4.194367191983402e-05, "loss": 3.7696, "step": 36500 },
    { "epoch": 1.63, "learning_rate": 4.1833311261201605e-05, "loss": 3.7553, "step": 37000 },
    { "epoch": 1.66, "learning_rate": 4.17229506025692e-05, "loss": 3.7469, "step": 37500 },
    { "epoch": 1.68, "learning_rate": 4.161258994393679e-05, "loss": 3.732, "step": 38000 },
    { "epoch": 1.7, "learning_rate": 4.1502229285304376e-05, "loss": 3.7407, "step": 38500 },
    { "epoch": 1.72, "learning_rate": 4.1391868626671964e-05, "loss": 3.7176, "step": 39000 },
    { "epoch": 1.74, "learning_rate": 4.128150796803956e-05, "loss": 3.71, "step": 39500 },
    { "epoch": 1.77, "learning_rate": 4.117114730940714e-05, "loss": 3.709, "step": 40000 },
    { "epoch": 1.79, "learning_rate": 4.1060786650774736e-05, "loss": 3.677, "step": 40500 },
    { "epoch": 1.81, "learning_rate": 4.0950425992142324e-05, "loss": 3.6671, "step": 41000 },
    { "epoch": 1.83, "learning_rate": 4.084006533350991e-05, "loss": 3.6457, "step": 41500 },
    { "epoch": 1.85, "learning_rate": 4.07297046748775e-05, "loss": 3.6528, "step": 42000 },
    { "epoch": 1.88, "learning_rate": 4.0619344016245095e-05, "loss": 3.6485, "step": 42500 },
    { "epoch": 1.9, "learning_rate": 4.050898335761268e-05, "loss": 3.6456, "step": 43000 },
    { "epoch": 1.92, "learning_rate": 4.039862269898027e-05, "loss": 3.6146, "step": 43500 },
    { "epoch": 1.94, "learning_rate": 4.028826204034786e-05, "loss": 3.6269, "step": 44000 },
    { "epoch": 1.96, "learning_rate": 4.017790138171545e-05, "loss": 3.614, "step": 44500 },
    { "epoch": 1.99, "learning_rate": 4.0067540723083036e-05, "loss": 3.5848, "step": 45000 },
    { "epoch": 2.01, "learning_rate": 3.9957180064450625e-05, "loss": 3.5802, "step": 45500 },
    { "epoch": 2.03, "learning_rate": 3.984681940581821e-05, "loss": 3.5824, "step": 46000 },
    { "epoch": 2.05, "learning_rate": 3.973645874718581e-05, "loss": 3.5543, "step": 46500 },
    { "epoch": 2.07, "learning_rate": 3.9626098088553396e-05, "loss": 3.5592, "step": 47000 },
    { "epoch": 2.1, "learning_rate": 3.9515737429920984e-05, "loss": 3.5176, "step": 47500 },
    { "epoch": 2.12, "learning_rate": 3.940537677128857e-05, "loss": 3.5316, "step": 48000 },
    { "epoch": 2.14, "learning_rate": 3.929501611265616e-05, "loss": 3.5072, "step": 48500 },
    { "epoch": 2.16, "learning_rate": 3.918465545402375e-05, "loss": 3.5152, "step": 49000 },
    { "epoch": 2.19, "learning_rate": 3.9074294795391344e-05, "loss": 3.4943, "step": 49500 },
    { "epoch": 2.21, "learning_rate": 3.8963934136758925e-05, "loss": 3.505, "step": 50000 },
    { "epoch": 2.23, "learning_rate": 3.885357347812652e-05, "loss": 3.4826, "step": 50500 },
    { "epoch": 2.25, "learning_rate": 3.874321281949411e-05, "loss": 3.4639, "step": 51000 },
    { "epoch": 2.27, "learning_rate": 3.86328521608617e-05, "loss": 3.4813, "step": 51500 },
    { "epoch": 2.3, "learning_rate": 3.8522491502229285e-05, "loss": 3.4717, "step": 52000 },
    { "epoch": 2.32, "learning_rate": 3.841213084359688e-05, "loss": 3.4705, "step": 52500 },
    { "epoch": 2.34, "learning_rate": 3.830177018496446e-05, "loss": 3.4374, "step": 53000 },
    { "epoch": 2.36, "learning_rate": 3.8191409526332056e-05, "loss": 3.4321, "step": 53500 },
    { "epoch": 2.38, "learning_rate": 3.8081048867699645e-05, "loss": 3.4489, "step": 54000 },
    { "epoch": 2.41, "learning_rate": 3.797068820906723e-05, "loss": 3.4124, "step": 54500 },
    { "epoch": 2.43, "learning_rate": 3.786032755043482e-05, "loss": 3.4061, "step": 55000 },
    { "epoch": 2.45, "learning_rate": 3.7749966891802416e-05, "loss": 3.4028, "step": 55500 },
    { "epoch": 2.47, "learning_rate": 3.763960623317e-05, "loss": 3.3825, "step": 56000 },
    { "epoch": 2.49, "learning_rate": 3.752924557453759e-05, "loss": 3.3922, "step": 56500 },
    { "epoch": 2.52, "learning_rate": 3.741888491590518e-05, "loss": 3.3839, "step": 57000 },
    { "epoch": 2.54, "learning_rate": 3.730852425727277e-05, "loss": 3.3888, "step": 57500 },
    { "epoch": 2.56, "learning_rate": 3.719816359864036e-05, "loss": 3.3722, "step": 58000 },
    { "epoch": 2.58, "learning_rate": 3.7087802940007945e-05, "loss": 3.3507, "step": 58500 },
    { "epoch": 2.6, "learning_rate": 3.6977442281375534e-05, "loss": 3.3475, "step": 59000 },
    { "epoch": 2.63, "learning_rate": 3.686708162274313e-05, "loss": 3.3432, "step": 59500 },
    { "epoch": 2.65, "learning_rate": 3.675672096411072e-05, "loss": 3.3564, "step": 60000 },
    { "epoch": 2.67, "learning_rate": 3.6646360305478305e-05, "loss": 3.331, "step": 60500 },
    { "epoch": 2.69, "learning_rate": 3.653599964684589e-05, "loss": 3.3267, "step": 61000 },
    { "epoch": 2.71, "learning_rate": 3.642563898821348e-05, "loss": 3.3173, "step": 61500 },
    { "epoch": 2.74, "learning_rate": 3.631527832958107e-05, "loss": 3.2926, "step": 62000 },
    { "epoch": 2.76, "learning_rate": 3.6204917670948665e-05, "loss": 3.3027, "step": 62500 },
    { "epoch": 2.78, "learning_rate": 3.6094557012316246e-05, "loss": 3.2966, "step": 63000 },
    { "epoch": 2.8, "learning_rate": 3.598419635368384e-05, "loss": 3.2973, "step": 63500 },
    { "epoch": 2.83, "learning_rate": 3.587383569505143e-05, "loss": 3.2974, "step": 64000 },
    { "epoch": 2.85, "learning_rate": 3.576347503641902e-05, "loss": 3.3035, "step": 64500 },
    { "epoch": 2.87, "learning_rate": 3.5653114377786606e-05, "loss": 3.297, "step": 65000 },
    { "epoch": 2.89, "learning_rate": 3.55427537191542e-05, "loss": 3.2761, "step": 65500 },
    { "epoch": 2.91, "learning_rate": 3.543239306052178e-05, "loss": 3.2871, "step": 66000 },
    { "epoch": 2.94, "learning_rate": 3.532203240188938e-05, "loss": 3.2621, "step": 66500 },
    { "epoch": 2.96, "learning_rate": 3.5211671743256965e-05, "loss": 3.2646, "step": 67000 },
    { "epoch": 2.98, "learning_rate": 3.5101311084624553e-05, "loss": 3.2538, "step": 67500 },
    { "epoch": 3.0, "learning_rate": 3.499095042599214e-05, "loss": 3.247, "step": 68000 },
    { "epoch": 3.02, "learning_rate": 3.488058976735974e-05, "loss": 3.2398, "step": 68500 },
    { "epoch": 3.05, "learning_rate": 3.477022910872732e-05, "loss": 3.219, "step": 69000 },
    { "epoch": 3.07, "learning_rate": 3.465986845009491e-05, "loss": 3.2208, "step": 69500 },
    { "epoch": 3.09, "learning_rate": 3.45495077914625e-05, "loss": 3.2071, "step": 70000 },
    { "epoch": 3.11, "learning_rate": 3.443914713283009e-05, "loss": 3.2269, "step": 70500 },
    { "epoch": 3.13, "learning_rate": 3.432878647419768e-05, "loss": 3.218, "step": 71000 },
    { "epoch": 3.16, "learning_rate": 3.4218425815565266e-05, "loss": 3.2206, "step": 71500 },
    { "epoch": 3.18, "learning_rate": 3.4108065156932854e-05, "loss": 3.1963, "step": 72000 },
    { "epoch": 3.2, "learning_rate": 3.399770449830045e-05, "loss": 3.1893, "step": 72500 },
    { "epoch": 3.22, "learning_rate": 3.388734383966804e-05, "loss": 3.1861, "step": 73000 },
    { "epoch": 3.24, "learning_rate": 3.3776983181035626e-05, "loss": 3.1908, "step": 73500 },
    { "epoch": 3.27, "learning_rate": 3.3666622522403214e-05, "loss": 3.1648, "step": 74000 },
    { "epoch": 3.29, "learning_rate": 3.35562618637708e-05, "loss": 3.1668, "step": 74500 },
    { "epoch": 3.31, "learning_rate": 3.344590120513839e-05, "loss": 3.1649, "step": 75000 },
    { "epoch": 3.33, "learning_rate": 3.3335540546505985e-05, "loss": 3.1564, "step": 75500 },
    { "epoch": 3.35, "learning_rate": 3.322517988787357e-05, "loss": 3.1445, "step": 76000 },
    { "epoch": 3.38, "learning_rate": 3.311481922924116e-05, "loss": 3.1372, "step": 76500 },
    { "epoch": 3.4, "learning_rate": 3.300445857060875e-05, "loss": 3.1581, "step": 77000 },
    { "epoch": 3.42, "learning_rate": 3.289409791197634e-05, "loss": 3.1479, "step": 77500 },
    { "epoch": 3.44, "learning_rate": 3.2783737253343926e-05, "loss": 3.1489, "step": 78000 },
    { "epoch": 3.47, "learning_rate": 3.267337659471152e-05, "loss": 3.1375, "step": 78500 },
    { "epoch": 3.49, "learning_rate": 3.25630159360791e-05, "loss": 3.1397, "step": 79000 },
    { "epoch": 3.51, "learning_rate": 3.24526552774467e-05, "loss": 3.1281, "step": 79500 },
    { "epoch": 3.53, "learning_rate": 3.2342294618814286e-05, "loss": 3.1195, "step": 80000 },
    { "epoch": 3.55, "learning_rate": 3.2231933960181874e-05, "loss": 3.1139, "step": 80500 },
    { "epoch": 3.58, "learning_rate": 3.212157330154946e-05, "loss": 3.1121, "step": 81000 },
    { "epoch": 3.6, "learning_rate": 3.201121264291706e-05, "loss": 3.1138, "step": 81500 },
    { "epoch": 3.62, "learning_rate": 3.1900851984284646e-05, "loss": 3.0999, "step": 82000 },
    { "epoch": 3.64, "learning_rate": 3.1790491325652234e-05, "loss": 3.1108, "step": 82500 },
    { "epoch": 3.66, "learning_rate": 3.168013066701982e-05, "loss": 3.0985, "step": 83000 },
    { "epoch": 3.69, "learning_rate": 3.156977000838741e-05, "loss": 3.0951, "step": 83500 },
    { "epoch": 3.71, "learning_rate": 3.1459409349755005e-05, "loss": 3.1011, "step": 84000 },
    { "epoch": 3.73, "learning_rate": 3.134904869112259e-05, "loss": 3.0798, "step": 84500 },
    { "epoch": 3.75, "learning_rate": 3.123868803249018e-05, "loss": 3.0822, "step": 85000 },
    { "epoch": 3.77, "learning_rate": 3.112832737385777e-05, "loss": 3.088, "step": 85500 },
    { "epoch": 3.8, "learning_rate": 3.101796671522536e-05, "loss": 3.0756, "step": 86000 },
    { "epoch": 3.82, "learning_rate": 3.0907606056592946e-05, "loss": 3.0744, "step": 86500 },
    { "epoch": 3.84, "learning_rate": 3.079724539796054e-05, "loss": 3.0834, "step": 87000 },
    { "epoch": 3.86, "learning_rate": 3.068688473932812e-05, "loss": 3.0687, "step": 87500 },
    { "epoch": 3.88, "learning_rate": 3.057652408069572e-05, "loss": 3.0488, "step": 88000 },
    { "epoch": 3.91, "learning_rate": 3.0466163422063302e-05, "loss": 3.0629, "step": 88500 },
    { "epoch": 3.93, "learning_rate": 3.0355802763430897e-05, "loss": 3.0516, "step": 89000 },
    { "epoch": 3.95, "learning_rate": 3.0245442104798482e-05, "loss": 3.0653, "step": 89500 },
    { "epoch": 3.97, "learning_rate": 3.0135081446166074e-05, "loss": 3.0395, "step": 90000 },
    { "epoch": 4.0, "learning_rate": 3.0024720787533662e-05, "loss": 3.0313, "step": 90500 },
    { "epoch": 4.02, "learning_rate": 2.9914360128901254e-05, "loss": 3.024, "step": 91000 },
    { "epoch": 4.04, "learning_rate": 2.980399947026884e-05, "loss": 3.0226, "step": 91500 },
    { "epoch": 4.06, "learning_rate": 2.969363881163643e-05, "loss": 3.0257, "step": 92000 },
    { "epoch": 4.08, "learning_rate": 2.958327815300402e-05, "loss": 3.0177, "step": 92500 },
    { "epoch": 4.11, "learning_rate": 2.947291749437161e-05, "loss": 3.011, "step": 93000 },
    { "epoch": 4.13, "learning_rate": 2.9362556835739198e-05, "loss": 3.0119, "step": 93500 },
    { "epoch": 4.15, "learning_rate": 2.925219617710679e-05, "loss": 2.9969, "step": 94000 },
    { "epoch": 4.17, "learning_rate": 2.9141835518474375e-05, "loss": 3.018, "step": 94500 },
    { "epoch": 4.19, "learning_rate": 2.9031474859841966e-05, "loss": 3.0119, "step": 95000 },
    { "epoch": 4.22, "learning_rate": 2.8921114201209554e-05, "loss": 3.0031, "step": 95500 },
    { "epoch": 4.24, "learning_rate": 2.8810753542577146e-05, "loss": 3.0037, "step": 96000 },
    { "epoch": 4.26, "learning_rate": 2.870039288394473e-05, "loss": 2.9866, "step": 96500 },
    { "epoch": 4.28, "learning_rate": 2.8590032225312326e-05, "loss": 3.0029, "step": 97000 },
    { "epoch": 4.3, "learning_rate": 2.847967156667991e-05, "loss": 3.0032, "step": 97500 },
    { "epoch": 4.33, "learning_rate": 2.8369310908047502e-05, "loss": 2.9706, "step": 98000 },
    { "epoch": 4.35, "learning_rate": 2.825895024941509e-05, "loss": 2.9844, "step": 98500 },
    { "epoch": 4.37, "learning_rate": 2.8148589590782682e-05, "loss": 2.9758, "step": 99000 },
    { "epoch": 4.39, "learning_rate": 2.8038228932150267e-05, "loss": 2.9609, "step": 99500 },
    { "epoch": 4.41, "learning_rate": 2.792786827351786e-05, "loss": 2.9714, "step": 100000 },
    { "epoch": 4.44, "learning_rate": 2.7817507614885447e-05, "loss": 2.9702, "step": 100500 },
    { "epoch": 4.46, "learning_rate": 2.7707146956253038e-05, "loss": 2.9755, "step": 101000 },
    { "epoch": 4.48, "learning_rate": 2.7596786297620626e-05, "loss": 2.9457, "step": 101500 },
    { "epoch": 4.5, "learning_rate": 2.7486425638988218e-05, "loss": 2.9575, "step": 102000 },
    { "epoch": 4.52, "learning_rate": 2.7376064980355803e-05, "loss": 2.9565, "step": 102500 },
    { "epoch": 4.55, "learning_rate": 2.7265704321723395e-05, "loss": 2.9376, "step": 103000 },
    { "epoch": 4.57, "learning_rate": 2.7155343663090983e-05, "loss": 2.9508, "step": 103500 },
    { "epoch": 4.59, "learning_rate": 2.7044983004458574e-05, "loss": 2.9427, "step": 104000 },
    { "epoch": 4.61, "learning_rate": 2.693462234582616e-05, "loss": 2.9427, "step": 104500 },
    { "epoch": 4.64, "learning_rate": 2.682426168719375e-05, "loss": 2.9428, "step": 105000 },
    { "epoch": 4.66, "learning_rate": 2.671390102856134e-05, "loss": 2.9441, "step": 105500 },
    { "epoch": 4.68, "learning_rate": 2.660354036992893e-05, "loss": 2.9409, "step": 106000 },
    { "epoch": 4.7, "learning_rate": 2.649317971129652e-05, "loss": 2.9254, "step": 106500 },
    { "epoch": 4.72, "learning_rate": 2.638281905266411e-05, "loss": 2.9344, "step": 107000 },
    { "epoch": 4.75, "learning_rate": 2.6272458394031695e-05, "loss": 2.9313, "step": 107500 },
    { "epoch": 4.77, "learning_rate": 2.6162097735399287e-05, "loss": 2.926, "step": 108000 },
    { "epoch": 4.79, "learning_rate": 2.6051737076766875e-05, "loss": 2.9341, "step": 108500 },
    { "epoch": 4.81, "learning_rate": 2.5941376418134467e-05, "loss": 2.9209, "step": 109000 },
    { "epoch": 4.83, "learning_rate": 2.583101575950205e-05, "loss": 2.9083, "step": 109500 },
    { "epoch": 4.86, "learning_rate": 2.5720655100869646e-05, "loss": 2.9081, "step": 110000 },
    { "epoch": 4.88, "learning_rate": 2.561029444223723e-05, "loss": 2.8912, "step": 110500 },
    { "epoch": 4.9, "learning_rate": 2.5499933783604823e-05, "loss": 2.9015, "step": 111000 },
    { "epoch": 4.92, "learning_rate": 2.538957312497241e-05, "loss": 2.8944, "step": 111500 },
    { "epoch": 4.94, "learning_rate": 2.5279212466340003e-05, "loss": 2.9002, "step": 112000 },
    { "epoch": 4.97, "learning_rate": 2.5168851807707588e-05, "loss": 2.9021, "step": 112500 },
    { "epoch": 4.99, "learning_rate": 2.505849114907518e-05, "loss": 2.8858, "step": 113000 },
    { "epoch": 5.01, "learning_rate": 2.4948130490442767e-05, "loss": 2.8896, "step": 113500 },
    { "epoch": 5.03, "learning_rate": 2.4837769831810356e-05, "loss": 2.872, "step": 114000 },
    { "epoch": 5.05, "learning_rate": 2.4727409173177947e-05, "loss": 2.8754, "step": 114500 },
    { "epoch": 5.08, "learning_rate": 2.4617048514545535e-05, "loss": 2.8811, "step": 115000 },
    { "epoch": 5.1, "learning_rate": 2.4506687855913124e-05, "loss": 2.8895, "step": 115500 },
    { "epoch": 5.12, "learning_rate": 2.4396327197280712e-05, "loss": 2.8927, "step": 116000 },
    { "epoch": 5.14, "learning_rate": 2.4285966538648303e-05, "loss": 2.8724, "step": 116500 },
    { "epoch": 5.16, "learning_rate": 2.417560588001589e-05, "loss": 2.8696, "step": 117000 },
    { "epoch": 5.19, "learning_rate": 2.406524522138348e-05, "loss": 2.8622, "step": 117500 },
    { "epoch": 5.21, "learning_rate": 2.395488456275107e-05, "loss": 2.8595, "step": 118000 },
    { "epoch": 5.23, "learning_rate": 2.384452390411866e-05, "loss": 2.8666, "step": 118500 },
    { "epoch": 5.25, "learning_rate": 2.3734163245486248e-05, "loss": 2.8444, "step": 119000 },
    { "epoch": 5.28, "learning_rate": 2.362380258685384e-05, "loss": 2.8432, "step": 119500 },
    { "epoch": 5.3, "learning_rate": 2.3513441928221428e-05, "loss": 2.8656, "step": 120000 },
    { "epoch": 5.32, "learning_rate": 2.3403081269589016e-05, "loss": 2.8539, "step": 120500 },
    { "epoch": 5.34, "learning_rate": 2.3292720610956607e-05, "loss": 2.8426, "step": 121000 },
    { "epoch": 5.36, "learning_rate": 2.3182359952324196e-05, "loss": 2.856, "step": 121500 },
    { "epoch": 5.39, "learning_rate": 2.3071999293691784e-05, "loss": 2.8566, "step": 122000 },
    { "epoch": 5.41, "learning_rate": 2.2961638635059375e-05, "loss": 2.8455, "step": 122500 },
    { "epoch": 5.43, "learning_rate": 2.2851277976426967e-05, "loss": 2.8429, "step": 123000 },
    { "epoch": 5.45, "learning_rate": 2.2740917317794555e-05, "loss": 2.8431, "step": 123500 },
    { "epoch": 5.47, "learning_rate": 2.2630556659162144e-05, "loss": 2.8347, "step": 124000 },
    { "epoch": 5.5, "learning_rate": 2.2520196000529735e-05, "loss": 2.8189, "step": 124500 },
    { "epoch": 5.52, "learning_rate": 2.2409835341897323e-05, "loss": 2.8323, "step": 125000 },
    { "epoch": 5.54, "learning_rate": 2.229947468326491e-05, "loss": 2.8366, "step": 125500 },
    { "epoch": 5.56, "learning_rate": 2.21891140246325e-05, "loss": 2.8309, "step": 126000 },
    { "epoch": 5.58, "learning_rate": 2.207875336600009e-05, "loss": 2.8222, "step": 126500 },
    { "epoch": 5.61, "learning_rate": 2.196839270736768e-05, "loss": 2.822, "step": 127000 },
    { "epoch": 5.63, "learning_rate": 2.1858032048735268e-05, "loss": 2.831, "step": 127500 },
    { "epoch": 5.65, "learning_rate": 2.174767139010286e-05, "loss": 2.8172, "step": 128000 },
    { "epoch": 5.67, "learning_rate": 2.1637310731470448e-05, "loss": 2.8112, "step": 128500 },
    { "epoch": 5.69, "learning_rate": 2.1526950072838036e-05, "loss": 2.8105, "step": 129000 },
    { "epoch": 5.72, "learning_rate": 2.1416589414205627e-05, "loss": 2.8175, "step": 129500 },
    { "epoch": 5.74, "learning_rate": 2.1306228755573216e-05, "loss": 2.8036, "step": 130000 },
    { "epoch": 5.76, "learning_rate": 2.1195868096940804e-05, "loss": 2.8057, "step": 130500 },
    { "epoch": 5.78, "learning_rate": 2.1085507438308395e-05, "loss": 2.8263, "step": 131000 },
    { "epoch": 5.8, "learning_rate": 2.0975146779675984e-05, "loss": 2.802, "step": 131500 },
    { "epoch": 5.83, "learning_rate": 2.0864786121043572e-05, "loss": 2.8149, "step": 132000 },
    { "epoch": 5.85, "learning_rate": 2.075442546241116e-05, "loss": 2.7993, "step": 132500 },
    { "epoch": 5.87, "learning_rate": 2.064406480377875e-05, "loss": 2.809, "step": 133000 },
    { "epoch": 5.89, "learning_rate": 2.053370414514634e-05, "loss": 2.7923, "step": 133500 },
    { "epoch": 5.92, "learning_rate": 2.0423343486513928e-05, "loss": 2.8004, "step": 134000 },
    { "epoch": 5.94, "learning_rate": 2.031298282788152e-05, "loss": 2.7943, "step": 134500 },
    { "epoch": 5.96, "learning_rate": 2.0202622169249108e-05, "loss": 2.7971, "step": 135000 },
    { "epoch": 5.98, "learning_rate": 2.0092261510616696e-05, "loss": 2.7913, "step": 135500 },
    { "epoch": 6.0, "learning_rate": 1.9981900851984288e-05, "loss": 2.7839, "step": 136000 },
    { "epoch": 6.03, "learning_rate": 1.9871540193351876e-05, "loss": 2.7821, "step": 136500 },
    { "epoch": 6.05, "learning_rate": 1.9761179534719464e-05, "loss": 2.7832, "step": 137000 },
    { "epoch": 6.07, "learning_rate": 1.9650818876087056e-05, "loss": 2.7768, "step": 137500 },
    { "epoch": 6.09, "learning_rate": 1.9540458217454644e-05, "loss": 2.7808, "step": 138000 },
    { "epoch": 6.11, "learning_rate": 1.9430097558822232e-05, "loss": 2.7602, "step": 138500 },
    { "epoch": 6.14, "learning_rate": 1.931973690018982e-05, "loss": 2.7757, "step": 139000 },
    { "epoch": 6.16, "learning_rate": 1.9209376241557412e-05, "loss": 2.7805, "step": 139500 },
    { "epoch": 6.18, "learning_rate": 1.9099015582925e-05, "loss": 2.7524, "step": 140000 },
    { "epoch": 6.2, "learning_rate": 1.898865492429259e-05, "loss": 2.768, "step": 140500 },
    { "epoch": 6.22, "learning_rate": 1.887829426566018e-05, "loss": 2.7736, "step": 141000 },
    { "epoch": 6.25, "learning_rate": 1.8767933607027768e-05, "loss": 2.755, "step": 141500 },
    { "epoch": 6.27, "learning_rate": 1.8657572948395356e-05, "loss": 2.7659, "step": 142000 },
    { "epoch": 6.29, "learning_rate": 1.8547212289762948e-05, "loss": 2.7609, "step": 142500 },
    { "epoch": 6.31, "learning_rate": 1.8436851631130536e-05, "loss": 2.7552, "step": 143000 },
    { "epoch": 6.33, "learning_rate": 1.8326490972498124e-05, "loss": 2.7453, "step": 143500 },
    { "epoch": 6.36, "learning_rate": 1.8216130313865716e-05, "loss": 2.762, "step": 144000 },
    { "epoch": 6.38, "learning_rate": 1.8105769655233304e-05, "loss": 2.7626, "step": 144500 },
    { "epoch": 6.4, "learning_rate": 1.7995408996600893e-05, "loss": 2.7481, "step": 145000 },
    { "epoch": 6.42, "learning_rate": 1.7885048337968484e-05, "loss": 2.745, "step": 145500 },
    { "epoch": 6.45, "learning_rate": 1.7774687679336072e-05, "loss": 2.7483, "step": 146000 },
    { "epoch": 6.47, "learning_rate": 1.766432702070366e-05, "loss": 2.7454, "step": 146500 },
    { "epoch": 6.49, "learning_rate": 1.755396636207125e-05, "loss": 2.7438, "step": 147000 },
    { "epoch": 6.51, "learning_rate": 1.744360570343884e-05, "loss": 2.7544, "step": 147500 },
    { "epoch": 6.53, "learning_rate": 1.733324504480643e-05, "loss": 2.7412, "step": 148000 },
    { "epoch": 6.56, "learning_rate": 1.7222884386174017e-05, "loss": 2.7303, "step": 148500 },
    { "epoch": 6.58, "learning_rate": 1.711252372754161e-05, "loss": 2.7464, "step": 149000 },
    { "epoch": 6.6, "learning_rate": 1.7002163068909197e-05, "loss": 2.7312, "step": 149500 },
    { "epoch": 6.62, "learning_rate": 1.6891802410276785e-05, "loss": 2.7302, "step": 150000 },
    { "epoch": 6.64, "learning_rate": 1.6781441751644376e-05, "loss": 2.7202, "step": 150500 },
    { "epoch": 6.67, "learning_rate": 1.6671081093011965e-05, "loss": 2.716, "step": 151000 },
    { "epoch": 6.69, "learning_rate": 1.6560720434379553e-05, "loss": 2.7209, "step": 151500 },
    { "epoch": 6.71, "learning_rate": 1.6450359775747144e-05, "loss": 2.7151, "step": 152000 },
    { "epoch": 6.73, "learning_rate": 1.6339999117114733e-05, "loss": 2.7211, "step": 152500 },
    { "epoch": 6.75, "learning_rate": 1.622963845848232e-05, "loss": 2.7304, "step": 153000 },
    { "epoch": 6.78, "learning_rate": 1.611927779984991e-05, "loss": 2.7166, "step": 153500 },
    { "epoch": 6.8, "learning_rate": 1.60089171412175e-05, "loss": 2.704, "step": 154000 },
    { "epoch": 6.82, "learning_rate": 1.589855648258509e-05, "loss": 2.7101, "step": 154500 },
    { "epoch": 6.84, "learning_rate": 1.5788195823952677e-05, "loss": 2.7131, "step": 155000 },
    { "epoch": 6.86, "learning_rate": 1.567783516532027e-05, "loss": 2.7249, "step": 155500 },
    { "epoch": 6.89, "learning_rate": 1.5567474506687857e-05, "loss": 2.7081, "step": 156000 },
    { "epoch": 6.91, "learning_rate": 1.5457113848055445e-05, "loss": 2.7138, "step": 156500 },
    { "epoch": 6.93, "learning_rate": 1.5346753189423037e-05, "loss": 2.7035, "step": 157000 },
    { "epoch": 6.95, "learning_rate": 1.5236392530790625e-05, "loss": 2.6958, "step": 157500 },
    { "epoch": 6.97, "learning_rate": 1.5126031872158213e-05, "loss": 2.7289, "step": 158000 },
    { "epoch": 7.0, "learning_rate": 1.5015671213525803e-05, "loss": 2.7104, "step": 158500 },
    { "epoch": 7.02, "learning_rate": 1.4905310554893393e-05, "loss": 2.7047, "step": 159000 },
    { "epoch": 7.04, "learning_rate": 1.4794949896260981e-05, "loss": 2.6943, "step": 159500 },
    { "epoch": 7.06, "learning_rate": 1.4684589237628571e-05, "loss": 2.6976, "step": 160000 },
    { "epoch": 7.09, "learning_rate": 1.4574228578996161e-05, "loss": 2.7057, "step": 160500 },
    { "epoch": 7.11, "learning_rate": 1.446386792036375e-05, "loss": 2.7014, "step": 161000 },
    { "epoch": 7.13, "learning_rate": 1.4353507261731339e-05, "loss": 2.6955, "step": 161500 },
    { "epoch": 7.15, "learning_rate": 1.4243146603098927e-05, "loss": 2.6801, "step": 162000 },
    { "epoch": 7.17, "learning_rate": 1.4132785944466517e-05, "loss": 2.6839, "step": 162500 },
    { "epoch": 7.2, "learning_rate": 1.4022425285834107e-05, "loss": 2.6768, "step": 163000 },
    { "epoch": 7.22, "learning_rate": 1.3912064627201695e-05, "loss": 2.682, "step": 163500 },
    { "epoch": 7.24, "learning_rate": 1.3801703968569285e-05, "loss": 2.6826, "step": 164000 },
    { "epoch": 7.26, "learning_rate": 1.3691343309936873e-05, "loss": 2.6889, "step": 164500 },
    { "epoch": 7.28, "learning_rate": 1.3580982651304463e-05, "loss": 2.6773, "step": 165000 },
    { "epoch": 7.31, "learning_rate": 1.3470621992672053e-05, "loss": 2.6864, "step": 165500 },
    { "epoch": 7.33, "learning_rate": 1.3360261334039641e-05, "loss": 2.6754, "step": 166000 },
    { "epoch": 7.35, "learning_rate": 1.3249900675407231e-05, "loss": 2.6709, "step": 166500 },
    { "epoch": 7.37, "learning_rate": 1.3139540016774821e-05, "loss": 2.6883, "step": 167000 },
    { "epoch": 7.39, "learning_rate": 1.302917935814241e-05, "loss": 2.6711, "step": 167500 },
    { "epoch": 7.42, "learning_rate": 1.291881869951e-05, "loss": 2.6678, "step": 168000 },
    { "epoch": 7.44, "learning_rate": 1.2808458040877588e-05, "loss": 2.6892, "step": 168500 },
    { "epoch": 7.46, "learning_rate": 1.2698097382245178e-05, "loss": 2.6791, "step": 169000 },
    { "epoch": 7.48, "learning_rate": 1.2587736723612767e-05, "loss": 2.6766, "step": 169500 },
    { "epoch": 7.5, "learning_rate": 1.2477376064980356e-05, "loss": 2.6463, "step": 170000 },
    { "epoch": 7.53, "learning_rate": 1.2367015406347946e-05, "loss": 2.6794, "step": 170500 },
    { "epoch": 7.55, "learning_rate": 1.2256654747715535e-05, "loss": 2.6526, "step": 171000 },
    { "epoch": 7.57, "learning_rate": 1.2146294089083124e-05, "loss": 2.6555, "step": 171500 },
    { "epoch": 7.59, "learning_rate": 1.2035933430450714e-05, "loss": 2.6601, "step": 172000 },
    { "epoch": 7.61, "learning_rate": 1.1925572771818302e-05, "loss": 2.6551, "step": 172500 },
    { "epoch": 7.64, "learning_rate": 1.1815212113185892e-05, "loss": 2.6652, "step": 173000 },
    { "epoch": 7.66, "learning_rate": 1.1704851454553482e-05, "loss": 2.6643, "step": 173500 },
    { "epoch": 7.68, "learning_rate": 1.159449079592107e-05, "loss": 2.6539, "step": 174000 },
    { "epoch": 7.7, "learning_rate": 1.148413013728866e-05, "loss": 2.6737, "step": 174500 },
    { "epoch": 7.73, "learning_rate": 1.1373769478656248e-05, "loss": 2.6559, "step": 175000 },
    { "epoch": 7.75, "learning_rate": 1.1263408820023838e-05, "loss": 2.6595, "step": 175500 },
    { "epoch": 7.77, "learning_rate": 1.1153048161391428e-05, "loss": 2.6624, "step": 176000 },
    { "epoch": 7.79, "learning_rate": 1.1042687502759016e-05, "loss": 2.6431, "step": 176500 },
    { "epoch": 7.81, "learning_rate": 1.0932326844126606e-05, "loss": 2.6496, "step": 177000 },
    { "epoch": 7.84, "learning_rate": 1.0821966185494196e-05, "loss": 2.6586, "step": 177500 },
    { "epoch": 7.86, "learning_rate": 1.0711605526861784e-05, "loss": 2.655, "step": 178000 },
    { "epoch": 7.88, "learning_rate": 1.0601244868229374e-05, "loss": 2.6589, "step": 178500 },
    { "epoch": 7.9, "learning_rate": 1.0490884209596962e-05, "loss": 2.6389, "step": 179000 },
    { "epoch": 7.92, "learning_rate": 1.0380523550964552e-05, "loss": 2.6326, "step": 179500 },
    { "epoch": 7.95, "learning_rate": 1.0270162892332142e-05, "loss": 2.6503, "step": 180000 },
    { "epoch": 7.97, "learning_rate": 1.015980223369973e-05, "loss": 2.646, "step": 180500 },
    { "epoch": 7.99, "learning_rate": 1.004944157506732e-05, "loss": 2.6389, "step": 181000 },
    { "epoch": 8.01, "learning_rate": 9.939080916434908e-06, "loss": 2.6488, "step": 181500 },
    { "epoch": 8.03, "learning_rate": 9.828720257802498e-06, "loss": 2.6256, "step": 182000 },
    { "epoch": 8.06, "learning_rate": 9.718359599170088e-06, "loss": 2.6521, "step": 182500 },
    { "epoch": 8.08, "learning_rate": 9.607998940537676e-06, "loss": 2.6273, "step": 183000 },
    { "epoch": 8.1, "learning_rate": 9.497638281905268e-06, "loss": 2.6379, "step": 183500 },
    { "epoch": 8.12, "learning_rate": 9.387277623272856e-06, "loss": 2.6416, "step": 184000 },
    { "epoch": 8.14, "learning_rate": 9.276916964640446e-06, "loss": 2.6296, "step": 184500 },
    { "epoch": 8.17, "learning_rate": 9.166556306008036e-06, "loss": 2.6396, "step": 185000 },
    { "epoch": 8.19, "learning_rate": 9.056195647375624e-06, "loss": 2.6305, "step": 185500 },
    { "epoch": 8.21, "learning_rate": 8.945834988743214e-06, "loss": 2.6318, "step": 186000 },
    { "epoch": 8.23, "learning_rate": 8.835474330110802e-06, "loss": 2.6333, "step": 186500 },
    { "epoch": 8.25, "learning_rate": 8.725113671478392e-06, "loss": 2.6185, "step": 187000 },
    { "epoch": 8.28, "learning_rate": 8.614753012845982e-06, "loss": 2.6275, "step": 187500 },
    { "epoch": 8.3, "learning_rate": 8.50439235421357e-06, "loss": 2.6265, "step": 188000 },
    { "epoch": 8.32, "learning_rate": 8.39403169558116e-06, "loss": 2.6203, "step": 188500 },
    { "epoch": 8.34, "learning_rate": 8.28367103694875e-06, "loss": 2.6228, "step": 189000 },
    { "epoch": 8.37, "learning_rate": 8.173310378316338e-06, "loss": 2.6316, "step": 189500 },
    { "epoch": 8.39, "learning_rate": 8.062949719683928e-06, "loss": 2.6137, "step": 190000 },
    { "epoch": 8.41, "learning_rate": 7.952589061051516e-06, "loss": 2.6243, "step": 190500 },
    { "epoch": 8.43, "learning_rate": 7.842228402419106e-06, "loss": 2.6192, "step": 191000 },
    { "epoch": 8.45, "learning_rate": 7.731867743786696e-06, "loss": 2.6191, "step": 191500 },
    { "epoch": 8.48, "learning_rate": 7.621507085154285e-06, "loss": 2.6222, "step": 192000 },
    { "epoch": 8.5, "learning_rate": 7.511146426521874e-06, "loss": 2.6101, "step": 192500 },
    { "epoch": 8.52, "learning_rate": 7.4007857678894635e-06, "loss": 2.6214, "step": 193000 },
    { "epoch": 8.54, "learning_rate": 7.2904251092570525e-06, "loss": 2.6073, "step": 193500 },
    { "epoch": 8.56, "learning_rate": 7.180064450624642e-06, "loss": 2.6024, "step": 194000 },
    { "epoch": 8.59, "learning_rate": 7.0697037919922315e-06, "loss": 2.6149, "step": 194500 },
    { "epoch": 8.61, "learning_rate": 6.9593431333598205e-06, "loss": 2.6251, "step": 195000 },
    { "epoch": 8.63, "learning_rate": 6.84898247472741e-06, "loss": 2.6094, "step": 195500 },
    { "epoch": 8.65, "learning_rate": 6.738621816094999e-06, "loss": 2.6137, "step": 196000 },
    { "epoch": 8.67, "learning_rate": 6.6282611574625886e-06, "loss": 2.609, "step": 196500 },
    { "epoch": 8.7, "learning_rate": 6.517900498830178e-06, "loss": 2.6086, "step": 197000 },
    { "epoch": 8.72, "learning_rate": 6.407539840197767e-06, "loss": 2.6021, "step": 197500 },
    { "epoch": 8.74, "learning_rate": 6.297179181565356e-06, "loss": 2.6147, "step": 198000 },
    { "epoch": 8.76, "learning_rate": 6.186818522932946e-06, "loss": 2.6163, "step": 198500 },
    { "epoch": 8.78, "learning_rate": 6.076457864300535e-06, "loss": 2.6073, "step": 199000 },
    { "epoch": 8.81, "learning_rate": 5.966097205668124e-06, "loss": 2.6089, "step": 199500 },
    { "epoch": 8.83, "learning_rate": 5.855736547035713e-06, "loss": 2.596, "step": 200000 },
    { "epoch": 8.85, "learning_rate": 5.745375888403303e-06, "loss": 2.593, "step": 200500 },
    { "epoch": 8.87, "learning_rate": 5.635015229770892e-06, "loss": 2.6106, "step": 201000 },
    { "epoch": 8.9, "learning_rate": 5.524654571138481e-06, "loss": 2.5894, "step": 201500 },
    { "epoch": 8.92, "learning_rate": 5.41429391250607e-06, "loss": 2.5895, "step": 202000 },
    { "epoch": 8.94, "learning_rate": 5.30393325387366e-06, "loss": 2.5983, "step": 202500 },
    { "epoch": 8.96, "learning_rate": 5.193572595241249e-06, "loss": 2.6066, "step": 203000 },
    { "epoch": 8.98, "learning_rate": 5.083211936608838e-06, "loss": 2.5866, "step": 203500 },
    { "epoch": 9.01, "learning_rate": 4.972851277976427e-06, "loss": 2.6039, "step": 204000 },
    { "epoch": 9.03, "learning_rate": 4.862490619344016e-06, "loss": 2.5904, "step": 204500 },
    { "epoch": 9.05, "learning_rate": 4.752129960711606e-06, "loss": 2.5951, "step": 205000 },
    { "epoch": 9.07, "learning_rate": 4.641769302079195e-06, "loss": 2.5923, "step": 205500 },
    { "epoch": 9.09, "learning_rate": 4.531408643446784e-06, "loss": 2.6, "step": 206000 },
    { "epoch": 9.12, "learning_rate": 4.421047984814373e-06, "loss": 2.6014, "step": 206500 },
    { "epoch": 9.14, "learning_rate": 4.310687326181963e-06, "loss": 2.5878, "step": 207000 },
    { "epoch": 9.16, "learning_rate": 4.200326667549552e-06, "loss": 2.595, "step": 207500 },
    { "epoch": 9.18, "learning_rate": 4.089966008917141e-06, "loss": 2.5872, "step": 208000 },
    { "epoch": 9.2, "learning_rate": 3.97960535028473e-06, "loss": 2.5912, "step": 208500 },
    { "epoch": 9.23, "learning_rate": 3.86924469165232e-06, "loss": 2.5883, "step": 209000 },
    { "epoch": 9.25, "learning_rate": 3.758884033019909e-06, "loss": 2.5867, "step": 209500 },
    { "epoch": 9.27, "learning_rate": 3.6485233743874983e-06, "loss": 2.5768, "step": 210000 },
    { "epoch": 9.29, "learning_rate": 3.5381627157550878e-06, "loss": 2.5746, "step": 210500 },
    { "epoch": 9.31, "learning_rate": 3.427802057122677e-06, "loss": 2.5878, "step": 211000 },
    { "epoch": 9.34, "learning_rate": 3.317441398490266e-06, "loss": 2.5827, "step": 211500 },
    { "epoch": 9.36, "learning_rate": 3.2070807398578554e-06, "loss": 2.5782, "step": 212000 },
    { "epoch": 9.38, "learning_rate": 3.096720081225445e-06, "loss": 2.5837, "step": 212500 },
    { "epoch": 9.4, "learning_rate": 2.9863594225930343e-06, "loss": 2.58, "step": 213000 },
    { "epoch": 9.42, "learning_rate": 2.8759987639606234e-06, "loss": 2.5829, "step": 213500 },
    { "epoch": 9.45, "learning_rate": 2.765638105328213e-06, "loss": 2.5873, "step": 214000 },
    { "epoch": 9.47, "learning_rate": 2.655277446695802e-06, "loss": 2.5786, "step": 214500 },
    { "epoch": 9.49, "learning_rate": 2.5449167880633914e-06, "loss": 2.6002, "step": 215000 },
    { "epoch": 9.51, "learning_rate": 2.4345561294309805e-06, "loss": 2.5866, "step": 215500 },
    { "epoch": 9.54, "learning_rate": 2.32419547079857e-06, "loss": 2.5861, "step": 216000 },
    { "epoch": 9.56, "learning_rate": 2.213834812166159e-06, "loss": 2.5893, "step": 216500 },
    { "epoch": 9.58, "learning_rate": 2.103474153533748e-06, "loss": 2.5722, "step": 217000 },
    { "epoch": 9.6, "learning_rate": 1.9931134949013376e-06, "loss": 2.5867, "step": 217500 },
    { "epoch": 9.62, "learning_rate": 1.8827528362689268e-06, "loss": 2.5773, "step": 218000 },
    { "epoch": 9.65, "learning_rate": 1.772392177636516e-06, "loss": 2.5921, "step": 218500 },
    { "epoch": 9.67, "learning_rate": 1.6620315190041056e-06, "loss": 2.5922, "step": 219000 },
    { "epoch": 9.69, "learning_rate": 1.5516708603716949e-06, "loss": 2.5771, "step": 219500 },
    { "epoch": 9.71, "learning_rate": 1.441310201739284e-06, "loss": 2.5658, "step": 220000 },
    { "epoch": 9.73, "learning_rate": 1.3309495431068732e-06, "loss": 2.5758, "step": 220500 },
    { "epoch": 9.76, "learning_rate": 1.2205888844744625e-06, "loss": 2.5781, "step": 221000 },
    { "epoch": 9.78, "learning_rate": 1.1102282258420517e-06, "loss": 2.5685, "step": 221500 },
    { "epoch": 9.8, "learning_rate": 9.998675672096412e-07, "loss": 2.578, "step": 222000 },
    { "epoch": 9.82, "learning_rate": 8.895069085772305e-07, "loss": 2.5726, "step": 222500 },
    { "epoch": 9.84, "learning_rate": 7.791462499448198e-07, "loss": 2.5737, "step": 223000 },
    { "epoch": 9.87, "learning_rate": 6.68785591312409e-07, "loss": 2.5841, "step": 223500 },
    { "epoch": 9.89, "learning_rate": 5.584249326799983e-07, "loss": 2.5824, "step": 224000 },
    { "epoch": 9.91, "learning_rate": 4.480642740475875e-07, "loss": 2.5898, "step": 224500 },
    { "epoch": 9.93, "learning_rate": 3.3770361541517685e-07, "loss": 2.5766, "step": 225000 },
    { "epoch": 9.95, "learning_rate": 2.273429567827661e-07, "loss": 2.5611, "step": 225500 },
    { "epoch": 9.98, "learning_rate": 1.1698229815035537e-07, "loss": 2.575, "step": 226000 },
    { "epoch": 10.0, "learning_rate": 6.621639517944644e-09, "loss": 2.5662, "step": 226500 },
    { "epoch": 10.0, "step": 226530, "total_flos": 6.757045384548372e+18, "train_loss": 3.1833379993784776, "train_runtime": 282485.8482, "train_samples_per_second": 25.661, "train_steps_per_second": 0.802 }
  ],
  "logging_steps": 500,
  "max_steps": 226530,
  "num_train_epochs": 10,
  "save_steps": 100000,
  "total_flos": 6.757045384548372e+18,
  "trial_name": null,
  "trial_params": null
}