|
{ |
|
"best_metric": 2.6206448078155518, |
|
"best_model_checkpoint": "./model_tweets_2020_Q3_50/checkpoint-1696000", |
|
"epoch": 10.143273741600101, |
|
"eval_steps": 8000, |
|
"global_step": 2400000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 2.9380319118499756, |
|
"eval_runtime": 413.1894, |
|
"eval_samples_per_second": 482.224, |
|
"eval_steps_per_second": 30.141, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.0726666666666665e-07, |
|
"loss": 3.1308, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"eval_loss": 2.85927152633667, |
|
"eval_runtime": 411.8608, |
|
"eval_samples_per_second": 483.78, |
|
"eval_steps_per_second": 30.238, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"eval_loss": 2.806320905685425, |
|
"eval_runtime": 412.6446, |
|
"eval_samples_per_second": 482.861, |
|
"eval_steps_per_second": 30.181, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.0453333333333336e-07, |
|
"loss": 2.9519, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"eval_loss": 2.7831950187683105, |
|
"eval_runtime": 413.8798, |
|
"eval_samples_per_second": 481.42, |
|
"eval_steps_per_second": 30.091, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"eval_loss": 2.75207257270813, |
|
"eval_runtime": 415.2779, |
|
"eval_samples_per_second": 479.799, |
|
"eval_steps_per_second": 29.99, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.018e-07, |
|
"loss": 2.889, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"eval_loss": 2.732224225997925, |
|
"eval_runtime": 412.3868, |
|
"eval_samples_per_second": 483.163, |
|
"eval_steps_per_second": 30.2, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"eval_loss": 2.7259225845336914, |
|
"eval_runtime": 413.3395, |
|
"eval_samples_per_second": 482.049, |
|
"eval_steps_per_second": 30.13, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.9906666666666667e-07, |
|
"loss": 2.8592, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"eval_loss": 2.7218177318573, |
|
"eval_runtime": 416.6569, |
|
"eval_samples_per_second": 478.211, |
|
"eval_steps_per_second": 29.89, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"eval_loss": 2.7105536460876465, |
|
"eval_runtime": 416.4392, |
|
"eval_samples_per_second": 478.461, |
|
"eval_steps_per_second": 29.906, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.963333333333333e-07, |
|
"loss": 2.8345, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"eval_loss": 2.7070746421813965, |
|
"eval_runtime": 412.3646, |
|
"eval_samples_per_second": 483.189, |
|
"eval_steps_per_second": 30.201, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"eval_loss": 2.690347671508789, |
|
"eval_runtime": 411.5327, |
|
"eval_samples_per_second": 484.166, |
|
"eval_steps_per_second": 30.262, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.936e-07, |
|
"loss": 2.8303, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"eval_loss": 2.700122356414795, |
|
"eval_runtime": 415.2403, |
|
"eval_samples_per_second": 479.843, |
|
"eval_steps_per_second": 29.992, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"eval_loss": 2.692945957183838, |
|
"eval_runtime": 418.3575, |
|
"eval_samples_per_second": 476.267, |
|
"eval_steps_per_second": 29.769, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 3.908666666666667e-07, |
|
"loss": 2.824, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"eval_loss": 2.690654993057251, |
|
"eval_runtime": 418.1816, |
|
"eval_samples_per_second": 476.468, |
|
"eval_steps_per_second": 29.781, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"eval_loss": 2.685214042663574, |
|
"eval_runtime": 414.1853, |
|
"eval_samples_per_second": 481.065, |
|
"eval_steps_per_second": 30.069, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 3.8813333333333334e-07, |
|
"loss": 2.8223, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"eval_loss": 2.6804039478302, |
|
"eval_runtime": 415.7725, |
|
"eval_samples_per_second": 479.228, |
|
"eval_steps_per_second": 29.954, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"eval_loss": 2.672717332839966, |
|
"eval_runtime": 413.8751, |
|
"eval_samples_per_second": 481.425, |
|
"eval_steps_per_second": 30.091, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.854e-07, |
|
"loss": 2.8141, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"eval_loss": 2.678410291671753, |
|
"eval_runtime": 416.4592, |
|
"eval_samples_per_second": 478.438, |
|
"eval_steps_per_second": 29.904, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"eval_loss": 2.6774685382843018, |
|
"eval_runtime": 415.2116, |
|
"eval_samples_per_second": 479.876, |
|
"eval_steps_per_second": 29.994, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.8266666666666665e-07, |
|
"loss": 2.8124, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"eval_loss": 2.6723482608795166, |
|
"eval_runtime": 414.2419, |
|
"eval_samples_per_second": 480.999, |
|
"eval_steps_per_second": 30.065, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"eval_loss": 2.668304681777954, |
|
"eval_runtime": 416.7765, |
|
"eval_samples_per_second": 478.074, |
|
"eval_steps_per_second": 29.882, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.799333333333333e-07, |
|
"loss": 2.8042, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"eval_loss": 2.671178102493286, |
|
"eval_runtime": 416.7365, |
|
"eval_samples_per_second": 478.12, |
|
"eval_steps_per_second": 29.885, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"eval_loss": 2.6660544872283936, |
|
"eval_runtime": 416.2186, |
|
"eval_samples_per_second": 478.715, |
|
"eval_steps_per_second": 29.922, |
|
"step": 184000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.772e-07, |
|
"loss": 2.8051, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"eval_loss": 2.67832612991333, |
|
"eval_runtime": 414.4038, |
|
"eval_samples_per_second": 480.811, |
|
"eval_steps_per_second": 30.053, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"eval_loss": 2.6682960987091064, |
|
"eval_runtime": 415.8345, |
|
"eval_samples_per_second": 479.157, |
|
"eval_steps_per_second": 29.949, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.7446666666666667e-07, |
|
"loss": 2.798, |
|
"step": 208000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"eval_loss": 2.6656229496002197, |
|
"eval_runtime": 416.0143, |
|
"eval_samples_per_second": 478.95, |
|
"eval_steps_per_second": 29.936, |
|
"step": 208000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"eval_loss": 2.6659064292907715, |
|
"eval_runtime": 415.0891, |
|
"eval_samples_per_second": 480.017, |
|
"eval_steps_per_second": 30.003, |
|
"step": 216000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.7173333333333333e-07, |
|
"loss": 2.8043, |
|
"step": 224000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"eval_loss": 2.6699647903442383, |
|
"eval_runtime": 414.3393, |
|
"eval_samples_per_second": 480.886, |
|
"eval_steps_per_second": 30.057, |
|
"step": 224000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"eval_loss": 2.667956829071045, |
|
"eval_runtime": 415.3586, |
|
"eval_samples_per_second": 479.706, |
|
"eval_steps_per_second": 29.984, |
|
"step": 232000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.69e-07, |
|
"loss": 2.8055, |
|
"step": 240000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"eval_loss": 2.659658432006836, |
|
"eval_runtime": 417.4574, |
|
"eval_samples_per_second": 477.294, |
|
"eval_steps_per_second": 29.833, |
|
"step": 240000 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"eval_loss": 2.659682512283325, |
|
"eval_runtime": 415.1848, |
|
"eval_samples_per_second": 479.907, |
|
"eval_steps_per_second": 29.996, |
|
"step": 248000 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.6626666666666664e-07, |
|
"loss": 2.8048, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"eval_loss": 2.656872272491455, |
|
"eval_runtime": 413.9085, |
|
"eval_samples_per_second": 481.387, |
|
"eval_steps_per_second": 30.089, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"eval_loss": 2.6502044200897217, |
|
"eval_runtime": 415.1132, |
|
"eval_samples_per_second": 479.99, |
|
"eval_steps_per_second": 30.001, |
|
"step": 264000 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.6353333333333335e-07, |
|
"loss": 2.806, |
|
"step": 272000 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"eval_loss": 2.659311532974243, |
|
"eval_runtime": 416.7743, |
|
"eval_samples_per_second": 478.076, |
|
"eval_steps_per_second": 29.882, |
|
"step": 272000 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"eval_loss": 2.659724473953247, |
|
"eval_runtime": 416.7046, |
|
"eval_samples_per_second": 478.157, |
|
"eval_steps_per_second": 29.887, |
|
"step": 280000 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 3.608e-07, |
|
"loss": 2.8012, |
|
"step": 288000 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"eval_loss": 2.6603612899780273, |
|
"eval_runtime": 415.9031, |
|
"eval_samples_per_second": 479.078, |
|
"eval_steps_per_second": 29.944, |
|
"step": 288000 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"eval_loss": 2.6545276641845703, |
|
"eval_runtime": 414.5962, |
|
"eval_samples_per_second": 480.588, |
|
"eval_steps_per_second": 30.039, |
|
"step": 296000 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 3.5806666666666666e-07, |
|
"loss": 2.8029, |
|
"step": 304000 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"eval_loss": 2.6571340560913086, |
|
"eval_runtime": 416.6303, |
|
"eval_samples_per_second": 478.242, |
|
"eval_steps_per_second": 29.892, |
|
"step": 304000 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"eval_loss": 2.6534266471862793, |
|
"eval_runtime": 416.4252, |
|
"eval_samples_per_second": 478.477, |
|
"eval_steps_per_second": 29.907, |
|
"step": 312000 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 3.553333333333333e-07, |
|
"loss": 2.7991, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"eval_loss": 2.665046215057373, |
|
"eval_runtime": 414.7989, |
|
"eval_samples_per_second": 480.353, |
|
"eval_steps_per_second": 30.024, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"eval_loss": 2.6680290699005127, |
|
"eval_runtime": 413.9236, |
|
"eval_samples_per_second": 481.369, |
|
"eval_steps_per_second": 30.088, |
|
"step": 328000 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 3.5259999999999997e-07, |
|
"loss": 2.7949, |
|
"step": 336000 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"eval_loss": 2.6544442176818848, |
|
"eval_runtime": 415.4503, |
|
"eval_samples_per_second": 479.6, |
|
"eval_steps_per_second": 29.977, |
|
"step": 336000 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"eval_loss": 2.645993709564209, |
|
"eval_runtime": 414.7891, |
|
"eval_samples_per_second": 480.365, |
|
"eval_steps_per_second": 30.025, |
|
"step": 344000 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.498666666666667e-07, |
|
"loss": 2.7972, |
|
"step": 352000 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"eval_loss": 2.655319929122925, |
|
"eval_runtime": 413.6708, |
|
"eval_samples_per_second": 481.663, |
|
"eval_steps_per_second": 30.106, |
|
"step": 352000 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"eval_loss": 2.6427876949310303, |
|
"eval_runtime": 415.818, |
|
"eval_samples_per_second": 479.176, |
|
"eval_steps_per_second": 29.951, |
|
"step": 360000 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 3.4713333333333333e-07, |
|
"loss": 2.7924, |
|
"step": 368000 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"eval_loss": 2.6535725593566895, |
|
"eval_runtime": 415.5951, |
|
"eval_samples_per_second": 479.433, |
|
"eval_steps_per_second": 29.967, |
|
"step": 368000 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"eval_loss": 2.6550498008728027, |
|
"eval_runtime": 416.627, |
|
"eval_samples_per_second": 478.246, |
|
"eval_steps_per_second": 29.892, |
|
"step": 376000 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 3.444e-07, |
|
"loss": 2.805, |
|
"step": 384000 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"eval_loss": 2.6524453163146973, |
|
"eval_runtime": 417.0486, |
|
"eval_samples_per_second": 477.762, |
|
"eval_steps_per_second": 29.862, |
|
"step": 384000 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"eval_loss": 2.652374744415283, |
|
"eval_runtime": 416.3041, |
|
"eval_samples_per_second": 478.616, |
|
"eval_steps_per_second": 29.916, |
|
"step": 392000 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 3.416666666666667e-07, |
|
"loss": 2.7972, |
|
"step": 400000 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"eval_loss": 2.657935619354248, |
|
"eval_runtime": 415.2722, |
|
"eval_samples_per_second": 479.806, |
|
"eval_steps_per_second": 29.99, |
|
"step": 400000 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"eval_loss": 2.649958372116089, |
|
"eval_runtime": 416.9007, |
|
"eval_samples_per_second": 477.932, |
|
"eval_steps_per_second": 29.873, |
|
"step": 408000 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 3.3893333333333335e-07, |
|
"loss": 2.8003, |
|
"step": 416000 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"eval_loss": 2.652555227279663, |
|
"eval_runtime": 416.2811, |
|
"eval_samples_per_second": 478.643, |
|
"eval_steps_per_second": 29.917, |
|
"step": 416000 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"eval_loss": 2.6443850994110107, |
|
"eval_runtime": 416.0471, |
|
"eval_samples_per_second": 478.912, |
|
"eval_steps_per_second": 29.934, |
|
"step": 424000 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.3619999999999995e-07, |
|
"loss": 2.8005, |
|
"step": 432000 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"eval_loss": 2.6462562084198, |
|
"eval_runtime": 413.6662, |
|
"eval_samples_per_second": 481.669, |
|
"eval_steps_per_second": 30.106, |
|
"step": 432000 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"eval_loss": 2.6549031734466553, |
|
"eval_runtime": 414.0878, |
|
"eval_samples_per_second": 481.178, |
|
"eval_steps_per_second": 30.076, |
|
"step": 440000 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 3.3346666666666666e-07, |
|
"loss": 2.7957, |
|
"step": 448000 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"eval_loss": 2.6530463695526123, |
|
"eval_runtime": 415.4694, |
|
"eval_samples_per_second": 479.578, |
|
"eval_steps_per_second": 29.976, |
|
"step": 448000 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"eval_loss": 2.6503868103027344, |
|
"eval_runtime": 415.0439, |
|
"eval_samples_per_second": 480.07, |
|
"eval_steps_per_second": 30.006, |
|
"step": 456000 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 3.307333333333333e-07, |
|
"loss": 2.7949, |
|
"step": 464000 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"eval_loss": 2.6479649543762207, |
|
"eval_runtime": 415.3875, |
|
"eval_samples_per_second": 479.673, |
|
"eval_steps_per_second": 29.982, |
|
"step": 464000 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"eval_loss": 2.64969539642334, |
|
"eval_runtime": 415.7352, |
|
"eval_samples_per_second": 479.271, |
|
"eval_steps_per_second": 29.957, |
|
"step": 472000 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 3.28e-07, |
|
"loss": 2.7978, |
|
"step": 480000 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"eval_loss": 2.6489925384521484, |
|
"eval_runtime": 416.7057, |
|
"eval_samples_per_second": 478.155, |
|
"eval_steps_per_second": 29.887, |
|
"step": 480000 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"eval_loss": 2.6505074501037598, |
|
"eval_runtime": 415.397, |
|
"eval_samples_per_second": 479.662, |
|
"eval_steps_per_second": 29.981, |
|
"step": 488000 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 3.252666666666667e-07, |
|
"loss": 2.8041, |
|
"step": 496000 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"eval_loss": 2.6387767791748047, |
|
"eval_runtime": 414.3947, |
|
"eval_samples_per_second": 480.822, |
|
"eval_steps_per_second": 30.053, |
|
"step": 496000 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"eval_loss": 2.645991325378418, |
|
"eval_runtime": 415.4791, |
|
"eval_samples_per_second": 479.567, |
|
"eval_steps_per_second": 29.975, |
|
"step": 504000 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.2253333333333334e-07, |
|
"loss": 2.7935, |
|
"step": 512000 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"eval_loss": 2.6518516540527344, |
|
"eval_runtime": 415.3071, |
|
"eval_samples_per_second": 479.765, |
|
"eval_steps_per_second": 29.987, |
|
"step": 512000 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"eval_loss": 2.6494412422180176, |
|
"eval_runtime": 416.2526, |
|
"eval_samples_per_second": 478.676, |
|
"eval_steps_per_second": 29.919, |
|
"step": 520000 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.198e-07, |
|
"loss": 2.7982, |
|
"step": 528000 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"eval_loss": 2.654994249343872, |
|
"eval_runtime": 415.1386, |
|
"eval_samples_per_second": 479.96, |
|
"eval_steps_per_second": 30.0, |
|
"step": 528000 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"eval_loss": 2.6460273265838623, |
|
"eval_runtime": 415.2099, |
|
"eval_samples_per_second": 479.878, |
|
"eval_steps_per_second": 29.994, |
|
"step": 536000 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 3.1706666666666665e-07, |
|
"loss": 2.7949, |
|
"step": 544000 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"eval_loss": 2.649731397628784, |
|
"eval_runtime": 416.6025, |
|
"eval_samples_per_second": 478.274, |
|
"eval_steps_per_second": 29.894, |
|
"step": 544000 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"eval_loss": 2.6478219032287598, |
|
"eval_runtime": 416.4871, |
|
"eval_samples_per_second": 478.406, |
|
"eval_steps_per_second": 29.902, |
|
"step": 552000 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 3.1433333333333336e-07, |
|
"loss": 2.7953, |
|
"step": 560000 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"eval_loss": 2.6487133502960205, |
|
"eval_runtime": 414.3193, |
|
"eval_samples_per_second": 480.909, |
|
"eval_steps_per_second": 30.059, |
|
"step": 560000 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"eval_loss": 2.640001058578491, |
|
"eval_runtime": 414.4485, |
|
"eval_samples_per_second": 480.759, |
|
"eval_steps_per_second": 30.05, |
|
"step": 568000 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 3.116e-07, |
|
"loss": 2.7942, |
|
"step": 576000 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"eval_loss": 2.6439836025238037, |
|
"eval_runtime": 415.349, |
|
"eval_samples_per_second": 479.717, |
|
"eval_steps_per_second": 29.984, |
|
"step": 576000 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"eval_loss": null,
|
"eval_runtime": 415.6748, |
|
"eval_samples_per_second": 479.341, |
|
"eval_steps_per_second": 29.961, |
|
"step": 584000 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 3.0886666666666667e-07, |
|
"loss": 2.803, |
|
"step": 592000 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"eval_loss": 2.6454689502716064, |
|
"eval_runtime": 416.6019, |
|
"eval_samples_per_second": 478.274, |
|
"eval_steps_per_second": 29.894, |
|
"step": 592000 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"eval_loss": 2.64009165763855, |
|
"eval_runtime": 414.1362, |
|
"eval_samples_per_second": 481.122, |
|
"eval_steps_per_second": 30.072, |
|
"step": 600000 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 3.061333333333333e-07, |
|
"loss": 2.7961, |
|
"step": 608000 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"eval_loss": 2.6510584354400635, |
|
"eval_runtime": 414.5176, |
|
"eval_samples_per_second": 480.679, |
|
"eval_steps_per_second": 30.045, |
|
"step": 608000 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"eval_loss": 2.6400928497314453, |
|
"eval_runtime": 414.9292, |
|
"eval_samples_per_second": 480.202, |
|
"eval_steps_per_second": 30.015, |
|
"step": 616000 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 3.034e-07, |
|
"loss": 2.7975, |
|
"step": 624000 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"eval_loss": 2.6436855792999268, |
|
"eval_runtime": 414.0912, |
|
"eval_samples_per_second": 481.174, |
|
"eval_steps_per_second": 30.075, |
|
"step": 624000 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"eval_loss": 2.6431784629821777, |
|
"eval_runtime": 414.8023, |
|
"eval_samples_per_second": 480.349, |
|
"eval_steps_per_second": 30.024, |
|
"step": 632000 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 3.0066666666666663e-07, |
|
"loss": 2.7946, |
|
"step": 640000 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"eval_loss": 2.6461291313171387, |
|
"eval_runtime": 415.6685, |
|
"eval_samples_per_second": 479.348, |
|
"eval_steps_per_second": 29.961, |
|
"step": 640000 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"eval_loss": 2.6491315364837646, |
|
"eval_runtime": 417.1915, |
|
"eval_samples_per_second": 477.598, |
|
"eval_steps_per_second": 29.852, |
|
"step": 648000 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 2.9793333333333334e-07, |
|
"loss": 2.7963, |
|
"step": 656000 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"eval_loss": 2.6441593170166016, |
|
"eval_runtime": 417.5373, |
|
"eval_samples_per_second": 477.203, |
|
"eval_steps_per_second": 29.827, |
|
"step": 656000 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"eval_loss": 2.641571283340454, |
|
"eval_runtime": 414.7923, |
|
"eval_samples_per_second": 480.361, |
|
"eval_steps_per_second": 30.025, |
|
"step": 664000 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.952e-07, |
|
"loss": 2.7924, |
|
"step": 672000 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"eval_loss": 2.6403403282165527, |
|
"eval_runtime": 413.6349, |
|
"eval_samples_per_second": 481.705, |
|
"eval_steps_per_second": 30.109, |
|
"step": 672000 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"eval_loss": 2.6465871334075928, |
|
"eval_runtime": 416.286, |
|
"eval_samples_per_second": 478.637, |
|
"eval_steps_per_second": 29.917, |
|
"step": 680000 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 2.9246666666666665e-07, |
|
"loss": 2.8004, |
|
"step": 688000 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"eval_loss": 2.6435861587524414, |
|
"eval_runtime": 414.7095, |
|
"eval_samples_per_second": 480.457, |
|
"eval_steps_per_second": 30.031, |
|
"step": 688000 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"eval_loss": 2.6446969509124756, |
|
"eval_runtime": 415.0332, |
|
"eval_samples_per_second": 480.082, |
|
"eval_steps_per_second": 30.007, |
|
"step": 696000 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.897333333333333e-07, |
|
"loss": 2.8039, |
|
"step": 704000 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"eval_loss": 2.641172409057617, |
|
"eval_runtime": 414.5379, |
|
"eval_samples_per_second": 480.656, |
|
"eval_steps_per_second": 30.043, |
|
"step": 704000 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"eval_loss": 2.639988422393799, |
|
"eval_runtime": 414.2916, |
|
"eval_samples_per_second": 480.941, |
|
"eval_steps_per_second": 30.061, |
|
"step": 712000 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 2.8699999999999996e-07, |
|
"loss": 2.7958, |
|
"step": 720000 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"eval_loss": 2.6418933868408203, |
|
"eval_runtime": 415.0166, |
|
"eval_samples_per_second": 480.101, |
|
"eval_steps_per_second": 30.008, |
|
"step": 720000 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"eval_loss": 2.6413352489471436, |
|
"eval_runtime": 413.087, |
|
"eval_samples_per_second": 482.344, |
|
"eval_steps_per_second": 30.149, |
|
"step": 728000 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 2.8426666666666667e-07, |
|
"loss": 2.7967, |
|
"step": 736000 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"eval_loss": null,
|
"eval_runtime": 413.1334, |
|
"eval_samples_per_second": 482.29, |
|
"eval_steps_per_second": 30.145, |
|
"step": 736000 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"eval_loss": 2.639918327331543, |
|
"eval_runtime": 422.7228, |
|
"eval_samples_per_second": 471.349, |
|
"eval_steps_per_second": 29.461, |
|
"step": 744000 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 2.815333333333333e-07, |
|
"loss": 2.7934, |
|
"step": 752000 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"eval_loss": 2.640524387359619, |
|
"eval_runtime": 422.6777, |
|
"eval_samples_per_second": 471.399, |
|
"eval_steps_per_second": 29.465, |
|
"step": 752000 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"eval_loss": 2.6387245655059814, |
|
"eval_runtime": 420.9074, |
|
"eval_samples_per_second": 473.382, |
|
"eval_steps_per_second": 29.588, |
|
"step": 760000 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 2.7880000000000003e-07, |
|
"loss": 2.7988, |
|
"step": 768000 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"eval_loss": 2.64625883102417, |
|
"eval_runtime": 419.8681, |
|
"eval_samples_per_second": 474.554, |
|
"eval_steps_per_second": 29.662, |
|
"step": 768000 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"eval_loss": 2.6307883262634277, |
|
"eval_runtime": 413.6046, |
|
"eval_samples_per_second": 481.74, |
|
"eval_steps_per_second": 30.111, |
|
"step": 776000 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 2.7606666666666664e-07, |
|
"loss": 2.793, |
|
"step": 784000 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"eval_loss": 2.6342904567718506, |
|
"eval_runtime": 414.4973, |
|
"eval_samples_per_second": 480.703, |
|
"eval_steps_per_second": 30.046, |
|
"step": 784000 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"eval_loss": 2.6358492374420166, |
|
"eval_runtime": 414.875, |
|
"eval_samples_per_second": 480.265, |
|
"eval_steps_per_second": 30.019, |
|
"step": 792000 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 2.733333333333333e-07, |
|
"loss": 2.797, |
|
"step": 800000 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"eval_loss": 2.639730930328369, |
|
"eval_runtime": 414.0225, |
|
"eval_samples_per_second": 481.254, |
|
"eval_steps_per_second": 30.08, |
|
"step": 800000 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"eval_loss": 2.6340529918670654, |
|
"eval_runtime": 415.9272, |
|
"eval_samples_per_second": 479.05, |
|
"eval_steps_per_second": 29.943, |
|
"step": 808000 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 2.706e-07, |
|
"loss": 2.7832, |
|
"step": 816000 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"eval_loss": 2.6393821239471436, |
|
"eval_runtime": 419.307, |
|
"eval_samples_per_second": 475.189, |
|
"eval_steps_per_second": 29.701, |
|
"step": 816000 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"eval_loss": 2.6341216564178467, |
|
"eval_runtime": 415.9801, |
|
"eval_samples_per_second": 478.989, |
|
"eval_steps_per_second": 29.939, |
|
"step": 824000 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 2.6786666666666666e-07, |
|
"loss": 2.792, |
|
"step": 832000 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"eval_loss": 2.642409086227417, |
|
"eval_runtime": 416.1328, |
|
"eval_samples_per_second": 478.814, |
|
"eval_steps_per_second": 29.928, |
|
"step": 832000 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"eval_loss": 2.638009786605835, |
|
"eval_runtime": 412.7545, |
|
"eval_samples_per_second": 482.733, |
|
"eval_steps_per_second": 30.173, |
|
"step": 840000 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 2.651333333333333e-07, |
|
"loss": 2.7945, |
|
"step": 848000 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"eval_loss": 2.6373209953308105, |
|
"eval_runtime": 415.2424, |
|
"eval_samples_per_second": 479.84, |
|
"eval_steps_per_second": 29.992, |
|
"step": 848000 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"eval_loss": 2.636631965637207, |
|
"eval_runtime": 415.4773, |
|
"eval_samples_per_second": 479.569, |
|
"eval_steps_per_second": 29.975, |
|
"step": 856000 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 2.624e-07, |
|
"loss": 2.7876, |
|
"step": 864000 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"eval_loss": 2.6409096717834473, |
|
"eval_runtime": 413.1752, |
|
"eval_samples_per_second": 482.241, |
|
"eval_steps_per_second": 30.142, |
|
"step": 864000 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"eval_loss": 2.638249635696411, |
|
"eval_runtime": 412.6062, |
|
"eval_samples_per_second": 482.906, |
|
"eval_steps_per_second": 30.184, |
|
"step": 872000 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 2.596666666666667e-07, |
|
"loss": 2.7975, |
|
"step": 880000 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"eval_loss": 2.62591290473938, |
|
"eval_runtime": 413.5939, |
|
"eval_samples_per_second": 481.753, |
|
"eval_steps_per_second": 30.112, |
|
"step": 880000 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"eval_loss": 2.6443488597869873, |
|
"eval_runtime": 415.3476, |
|
"eval_samples_per_second": 479.719, |
|
"eval_steps_per_second": 29.985, |
|
"step": 888000 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 2.5693333333333333e-07, |
|
"loss": 2.7965, |
|
"step": 896000 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"eval_loss": 2.624751567840576, |
|
"eval_runtime": 415.4056, |
|
"eval_samples_per_second": 479.652, |
|
"eval_steps_per_second": 29.98, |
|
"step": 896000 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"eval_loss": 2.639540433883667, |
|
"eval_runtime": 414.6525, |
|
"eval_samples_per_second": 480.523, |
|
"eval_steps_per_second": 30.035, |
|
"step": 904000 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 2.542e-07, |
|
"loss": 2.7991, |
|
"step": 912000 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"eval_loss": 2.632520914077759, |
|
"eval_runtime": 414.5399, |
|
"eval_samples_per_second": 480.653, |
|
"eval_steps_per_second": 30.043, |
|
"step": 912000 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"eval_loss": 2.635369300842285, |
|
"eval_runtime": 415.2855, |
|
"eval_samples_per_second": 479.79, |
|
"eval_steps_per_second": 29.989, |
|
"step": 920000 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 2.5146666666666664e-07, |
|
"loss": 2.7947, |
|
"step": 928000 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"eval_loss": 2.6342415809631348, |
|
"eval_runtime": 414.7268, |
|
"eval_samples_per_second": 480.437, |
|
"eval_steps_per_second": 30.029, |
|
"step": 928000 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"eval_loss": 2.6290154457092285, |
|
"eval_runtime": 414.3923, |
|
"eval_samples_per_second": 480.825, |
|
"eval_steps_per_second": 30.054, |
|
"step": 936000 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 2.4873333333333335e-07, |
|
"loss": 2.7977, |
|
"step": 944000 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"eval_loss": 2.631457567214966, |
|
"eval_runtime": 414.9085, |
|
"eval_samples_per_second": 480.226, |
|
"eval_steps_per_second": 30.016, |
|
"step": 944000 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"eval_loss": 2.634674310684204, |
|
"eval_runtime": 415.5412, |
|
"eval_samples_per_second": 479.495, |
|
"eval_steps_per_second": 29.971, |
|
"step": 952000 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 2.46e-07, |
|
"loss": 2.8, |
|
"step": 960000 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"eval_loss": 2.631823778152466, |
|
"eval_runtime": 415.1034, |
|
"eval_samples_per_second": 480.001, |
|
"eval_steps_per_second": 30.002, |
|
"step": 960000 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"eval_loss": 2.6328303813934326, |
|
"eval_runtime": 415.6276, |
|
"eval_samples_per_second": 479.395, |
|
"eval_steps_per_second": 29.964, |
|
"step": 968000 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 2.4326666666666666e-07, |
|
"loss": 2.7945, |
|
"step": 976000 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"eval_loss": 2.6315038204193115, |
|
"eval_runtime": 414.41, |
|
"eval_samples_per_second": 480.804, |
|
"eval_steps_per_second": 30.052, |
|
"step": 976000 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"eval_loss": 2.6296520233154297, |
|
"eval_runtime": 415.5411, |
|
"eval_samples_per_second": 479.495, |
|
"eval_steps_per_second": 29.971, |
|
"step": 984000 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 2.405333333333333e-07, |
|
"loss": 2.7946, |
|
"step": 992000 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"eval_loss": 2.6377837657928467, |
|
"eval_runtime": 415.4925, |
|
"eval_samples_per_second": 479.551, |
|
"eval_steps_per_second": 29.974, |
|
"step": 992000 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"eval_loss": 2.632783889770508, |
|
"eval_runtime": 415.0903, |
|
"eval_samples_per_second": 480.016, |
|
"eval_steps_per_second": 30.003, |
|
"step": 1000000 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 2.3779999999999997e-07, |
|
"loss": 2.7962, |
|
"step": 1008000 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"eval_loss": 2.6295533180236816, |
|
"eval_runtime": 413.2357, |
|
"eval_samples_per_second": 482.17, |
|
"eval_steps_per_second": 30.138, |
|
"step": 1008000 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"eval_loss": 2.6346757411956787, |
|
"eval_runtime": 416.3926, |
|
"eval_samples_per_second": 478.515, |
|
"eval_steps_per_second": 29.909, |
|
"step": 1016000 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 2.3506666666666668e-07, |
|
"loss": 2.7932, |
|
"step": 1024000 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"eval_loss": 2.6355090141296387, |
|
"eval_runtime": 417.2451, |
|
"eval_samples_per_second": 477.537, |
|
"eval_steps_per_second": 29.848, |
|
"step": 1024000 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"eval_loss": 2.6363675594329834, |
|
"eval_runtime": 416.1756, |
|
"eval_samples_per_second": 478.764, |
|
"eval_steps_per_second": 29.925, |
|
"step": 1032000 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 2.3233333333333334e-07, |
|
"loss": 2.7992, |
|
"step": 1040000 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"eval_loss": 2.632688522338867, |
|
"eval_runtime": 414.8733, |
|
"eval_samples_per_second": 480.267, |
|
"eval_steps_per_second": 30.019, |
|
"step": 1040000 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"eval_loss": 2.6272976398468018, |
|
"eval_runtime": 416.0306, |
|
"eval_samples_per_second": 478.931, |
|
"eval_steps_per_second": 29.935, |
|
"step": 1048000 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 2.2960000000000002e-07, |
|
"loss": 2.7922, |
|
"step": 1056000 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"eval_loss": 2.6301381587982178, |
|
"eval_runtime": 418.1292, |
|
"eval_samples_per_second": 476.527, |
|
"eval_steps_per_second": 29.785, |
|
"step": 1056000 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"eval_loss": 2.634971857070923, |
|
"eval_runtime": 415.4628, |
|
"eval_samples_per_second": 479.586, |
|
"eval_steps_per_second": 29.976, |
|
"step": 1064000 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 2.2686666666666667e-07, |
|
"loss": 2.7939, |
|
"step": 1072000 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"eval_loss": 2.63580584526062, |
|
"eval_runtime": 414.2262, |
|
"eval_samples_per_second": 481.017, |
|
"eval_steps_per_second": 30.066, |
|
"step": 1072000 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"eval_loss": null,
|
"eval_runtime": 415.858, |
|
"eval_samples_per_second": 479.13, |
|
"eval_steps_per_second": 29.948, |
|
"step": 1080000 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 2.2413333333333333e-07, |
|
"loss": 2.789, |
|
"step": 1088000 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"eval_loss": 2.628849983215332, |
|
"eval_runtime": 416.3917, |
|
"eval_samples_per_second": 478.516, |
|
"eval_steps_per_second": 29.909, |
|
"step": 1088000 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"eval_loss": 2.626696825027466, |
|
"eval_runtime": 415.8123, |
|
"eval_samples_per_second": 479.183, |
|
"eval_steps_per_second": 29.951, |
|
"step": 1096000 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 2.214e-07, |
|
"loss": 2.7965, |
|
"step": 1104000 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"eval_loss": 2.6229259967803955, |
|
"eval_runtime": 415.4378, |
|
"eval_samples_per_second": 479.615, |
|
"eval_steps_per_second": 29.978, |
|
"step": 1104000 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"eval_loss": 2.6331067085266113, |
|
"eval_runtime": 415.4213, |
|
"eval_samples_per_second": 479.634, |
|
"eval_steps_per_second": 29.979, |
|
"step": 1112000 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 2.1866666666666667e-07, |
|
"loss": 2.7963, |
|
"step": 1120000 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"eval_loss": 2.6367764472961426, |
|
"eval_runtime": 416.9274, |
|
"eval_samples_per_second": 477.901, |
|
"eval_steps_per_second": 29.871, |
|
"step": 1120000 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"eval_loss": 2.6436047554016113, |
|
"eval_runtime": 414.3896, |
|
"eval_samples_per_second": 480.828, |
|
"eval_steps_per_second": 30.054, |
|
"step": 1128000 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 2.1593333333333332e-07, |
|
"loss": 2.7993, |
|
"step": 1136000 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"eval_loss": 2.6363236904144287, |
|
"eval_runtime": 414.4703, |
|
"eval_samples_per_second": 480.734, |
|
"eval_steps_per_second": 30.048, |
|
"step": 1136000 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"eval_loss": 2.628790855407715, |
|
"eval_runtime": 413.6012, |
|
"eval_samples_per_second": 481.744, |
|
"eval_steps_per_second": 30.111, |
|
"step": 1144000 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 2.132e-07, |
|
"loss": 2.7952, |
|
"step": 1152000 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"eval_loss": 2.6294453144073486, |
|
"eval_runtime": 414.7494, |
|
"eval_samples_per_second": 480.411, |
|
"eval_steps_per_second": 30.028, |
|
"step": 1152000 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"eval_loss": 2.6337034702301025, |
|
"eval_runtime": 416.1788, |
|
"eval_samples_per_second": 478.761, |
|
"eval_steps_per_second": 29.925, |
|
"step": 1160000 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 2.1046666666666666e-07, |
|
"loss": 2.7972, |
|
"step": 1168000 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"eval_loss": 2.623483657836914, |
|
"eval_runtime": 414.4419, |
|
"eval_samples_per_second": 480.767, |
|
"eval_steps_per_second": 30.05, |
|
"step": 1168000 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"eval_loss": 2.640516996383667, |
|
"eval_runtime": 414.5607, |
|
"eval_samples_per_second": 480.629, |
|
"eval_steps_per_second": 30.041, |
|
"step": 1176000 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 2.0773333333333334e-07, |
|
"loss": 2.7988, |
|
"step": 1184000 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_loss": 2.626560926437378, |
|
"eval_runtime": 415.66, |
|
"eval_samples_per_second": 479.358, |
|
"eval_steps_per_second": 29.962, |
|
"step": 1184000 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"eval_loss": 2.632770299911499, |
|
"eval_runtime": 422.5737, |
|
"eval_samples_per_second": 471.515, |
|
"eval_steps_per_second": 29.472, |
|
"step": 1192000 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"learning_rate": 2.05e-07, |
|
"loss": 2.7901, |
|
"step": 1200000 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"eval_loss": 2.6335248947143555, |
|
"eval_runtime": 419.9786, |
|
"eval_samples_per_second": 474.429, |
|
"eval_steps_per_second": 29.654, |
|
"step": 1200000 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"eval_loss": 2.6405279636383057, |
|
"eval_runtime": 419.4258, |
|
"eval_samples_per_second": 475.054, |
|
"eval_steps_per_second": 29.693, |
|
"step": 1208000 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"learning_rate": 2.0226666666666668e-07, |
|
"loss": 2.7975, |
|
"step": 1216000 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"eval_loss": 2.624608278274536, |
|
"eval_runtime": 421.319, |
|
"eval_samples_per_second": 472.92, |
|
"eval_steps_per_second": 29.56, |
|
"step": 1216000 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"eval_loss": 2.6314573287963867, |
|
"eval_runtime": 417.198, |
|
"eval_samples_per_second": 477.591, |
|
"eval_steps_per_second": 29.852, |
|
"step": 1224000 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 1.9953333333333333e-07, |
|
"loss": 2.7974, |
|
"step": 1232000 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"eval_loss": 2.639028549194336, |
|
"eval_runtime": 414.299, |
|
"eval_samples_per_second": 480.933, |
|
"eval_steps_per_second": 30.06, |
|
"step": 1232000 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"eval_loss": 2.631808280944824, |
|
"eval_runtime": 413.617, |
|
"eval_samples_per_second": 481.726, |
|
"eval_steps_per_second": 30.11, |
|
"step": 1240000 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 1.968e-07, |
|
"loss": 2.7909, |
|
"step": 1248000 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"eval_loss": 2.6236965656280518, |
|
"eval_runtime": 414.1647, |
|
"eval_samples_per_second": 481.089, |
|
"eval_steps_per_second": 30.07, |
|
"step": 1248000 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"eval_loss": 2.6343328952789307, |
|
"eval_runtime": 416.7312, |
|
"eval_samples_per_second": 478.126, |
|
"eval_steps_per_second": 29.885, |
|
"step": 1256000 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 1.9406666666666667e-07, |
|
"loss": 2.7899, |
|
"step": 1264000 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"eval_loss": 2.628756284713745, |
|
"eval_runtime": 415.9879, |
|
"eval_samples_per_second": 478.98, |
|
"eval_steps_per_second": 29.938, |
|
"step": 1264000 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"eval_loss": 2.629678964614868, |
|
"eval_runtime": 415.5157, |
|
"eval_samples_per_second": 479.525, |
|
"eval_steps_per_second": 29.972, |
|
"step": 1272000 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 1.9133333333333333e-07, |
|
"loss": 2.7937, |
|
"step": 1280000 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"eval_loss": 2.63431715965271, |
|
"eval_runtime": 416.1785, |
|
"eval_samples_per_second": 478.761, |
|
"eval_steps_per_second": 29.925, |
|
"step": 1280000 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"eval_loss": 2.6306326389312744, |
|
"eval_runtime": 421.3392, |
|
"eval_samples_per_second": 472.897, |
|
"eval_steps_per_second": 29.558, |
|
"step": 1288000 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 1.886e-07, |
|
"loss": 2.7916, |
|
"step": 1296000 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"eval_loss": 2.6267640590667725, |
|
"eval_runtime": 419.4553, |
|
"eval_samples_per_second": 475.021, |
|
"eval_steps_per_second": 29.691, |
|
"step": 1296000 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"eval_loss": 2.63167667388916, |
|
"eval_runtime": 419.3882, |
|
"eval_samples_per_second": 475.097, |
|
"eval_steps_per_second": 29.696, |
|
"step": 1304000 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 1.8586666666666666e-07, |
|
"loss": 2.7874, |
|
"step": 1312000 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"eval_loss": 2.6380093097686768, |
|
"eval_runtime": 418.5148, |
|
"eval_samples_per_second": 476.088, |
|
"eval_steps_per_second": 29.758, |
|
"step": 1312000 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"eval_loss": 2.6281051635742188, |
|
"eval_runtime": 415.0221, |
|
"eval_samples_per_second": 480.095, |
|
"eval_steps_per_second": 30.008, |
|
"step": 1320000 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 1.8313333333333332e-07, |
|
"loss": 2.7967, |
|
"step": 1328000 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"eval_loss": 2.633375406265259, |
|
"eval_runtime": 414.3238, |
|
"eval_samples_per_second": 480.904, |
|
"eval_steps_per_second": 30.059, |
|
"step": 1328000 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"eval_loss": 2.6272847652435303, |
|
"eval_runtime": 413.511, |
|
"eval_samples_per_second": 481.849, |
|
"eval_steps_per_second": 30.118, |
|
"step": 1336000 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 1.804e-07, |
|
"loss": 2.791, |
|
"step": 1344000 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"eval_loss": 2.6339497566223145, |
|
"eval_runtime": 414.4191, |
|
"eval_samples_per_second": 480.794, |
|
"eval_steps_per_second": 30.052, |
|
"step": 1344000 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"eval_loss": 2.6276426315307617, |
|
"eval_runtime": 415.1118, |
|
"eval_samples_per_second": 479.991, |
|
"eval_steps_per_second": 30.002, |
|
"step": 1352000 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 1.7766666666666666e-07, |
|
"loss": 2.791, |
|
"step": 1360000 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"eval_loss": 2.6246891021728516, |
|
"eval_runtime": 414.8524, |
|
"eval_samples_per_second": 480.291, |
|
"eval_steps_per_second": 30.02, |
|
"step": 1360000 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"eval_loss": 2.630341053009033, |
|
"eval_runtime": 413.0138, |
|
"eval_samples_per_second": 482.429, |
|
"eval_steps_per_second": 30.154, |
|
"step": 1368000 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 1.7493333333333334e-07, |
|
"loss": 2.7909, |
|
"step": 1376000 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"eval_loss": 2.635547637939453, |
|
"eval_runtime": 414.6547, |
|
"eval_samples_per_second": 480.52, |
|
"eval_steps_per_second": 30.035, |
|
"step": 1376000 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"eval_loss": 2.6352195739746094, |
|
"eval_runtime": 415.8747, |
|
"eval_samples_per_second": 479.111, |
|
"eval_steps_per_second": 29.947, |
|
"step": 1384000 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 1.722e-07, |
|
"loss": 2.7833, |
|
"step": 1392000 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"eval_loss": 2.6320648193359375, |
|
"eval_runtime": 415.8557, |
|
"eval_samples_per_second": 479.133, |
|
"eval_steps_per_second": 29.948, |
|
"step": 1392000 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"eval_loss": 2.6335620880126953, |
|
"eval_runtime": 415.5817, |
|
"eval_samples_per_second": 479.448, |
|
"eval_steps_per_second": 29.968, |
|
"step": 1400000 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 1.6946666666666668e-07, |
|
"loss": 2.7944, |
|
"step": 1408000 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"eval_loss": 2.631206750869751, |
|
"eval_runtime": 414.1531, |
|
"eval_samples_per_second": 481.102, |
|
"eval_steps_per_second": 30.071, |
|
"step": 1408000 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"eval_loss": 2.6222622394561768, |
|
"eval_runtime": 414.6922, |
|
"eval_samples_per_second": 480.477, |
|
"eval_steps_per_second": 30.032, |
|
"step": 1416000 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"learning_rate": 1.6673333333333333e-07, |
|
"loss": 2.8001, |
|
"step": 1424000 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"eval_loss": 2.6369099617004395, |
|
"eval_runtime": 415.0521, |
|
"eval_samples_per_second": 480.06, |
|
"eval_steps_per_second": 30.006, |
|
"step": 1424000 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"eval_loss": 2.629903793334961, |
|
"eval_runtime": 414.081, |
|
"eval_samples_per_second": 481.186, |
|
"eval_steps_per_second": 30.076, |
|
"step": 1432000 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 1.64e-07, |
|
"loss": 2.7954, |
|
"step": 1440000 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"eval_loss": 2.637263536453247, |
|
"eval_runtime": 414.6606, |
|
"eval_samples_per_second": 480.513, |
|
"eval_steps_per_second": 30.034, |
|
"step": 1440000 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"eval_loss": 2.622265100479126, |
|
"eval_runtime": 416.7434, |
|
"eval_samples_per_second": 478.112, |
|
"eval_steps_per_second": 29.884, |
|
"step": 1448000 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"learning_rate": 1.6126666666666667e-07, |
|
"loss": 2.7914, |
|
"step": 1456000 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"eval_loss": 2.622511863708496, |
|
"eval_runtime": 417.3412, |
|
"eval_samples_per_second": 477.427, |
|
"eval_steps_per_second": 29.841, |
|
"step": 1456000 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"eval_loss": 2.627666473388672, |
|
"eval_runtime": 416.5994, |
|
"eval_samples_per_second": 478.277, |
|
"eval_steps_per_second": 29.894, |
|
"step": 1464000 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 1.5853333333333332e-07, |
|
"loss": 2.7896, |
|
"step": 1472000 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"eval_loss": 2.633397102355957, |
|
"eval_runtime": 415.8503, |
|
"eval_samples_per_second": 479.139, |
|
"eval_steps_per_second": 29.948, |
|
"step": 1472000 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"eval_loss": 2.625962495803833, |
|
"eval_runtime": 414.2001, |
|
"eval_samples_per_second": 481.048, |
|
"eval_steps_per_second": 30.068, |
|
"step": 1480000 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"learning_rate": 1.558e-07, |
|
"loss": 2.7925, |
|
"step": 1488000 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"eval_loss": 2.6312003135681152, |
|
"eval_runtime": 415.5727, |
|
"eval_samples_per_second": 479.459, |
|
"eval_steps_per_second": 29.968, |
|
"step": 1488000 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"eval_loss": 2.633601427078247, |
|
"eval_runtime": 416.5001, |
|
"eval_samples_per_second": 478.391, |
|
"eval_steps_per_second": 29.902, |
|
"step": 1496000 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"learning_rate": 1.5306666666666666e-07, |
|
"loss": 2.7976, |
|
"step": 1504000 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"eval_loss": 2.626960277557373, |
|
"eval_runtime": 413.9057, |
|
"eval_samples_per_second": 481.39, |
|
"eval_steps_per_second": 30.089, |
|
"step": 1504000 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"eval_loss": 2.6285572052001953, |
|
"eval_runtime": 415.4246, |
|
"eval_samples_per_second": 479.63, |
|
"eval_steps_per_second": 29.979, |
|
"step": 1512000 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"learning_rate": 1.5033333333333332e-07, |
|
"loss": 2.8025, |
|
"step": 1520000 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"eval_loss": 2.632030487060547, |
|
"eval_runtime": 415.8213, |
|
"eval_samples_per_second": 479.172, |
|
"eval_steps_per_second": 29.95, |
|
"step": 1520000 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"eval_loss": 2.6252386569976807, |
|
"eval_runtime": 415.748, |
|
"eval_samples_per_second": 479.257, |
|
"eval_steps_per_second": 29.956, |
|
"step": 1528000 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 1.476e-07, |
|
"loss": 2.7953, |
|
"step": 1536000 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"eval_loss": 2.631873607635498, |
|
"eval_runtime": 413.9705, |
|
"eval_samples_per_second": 481.314, |
|
"eval_steps_per_second": 30.084, |
|
"step": 1536000 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"eval_loss": 2.622326135635376, |
|
"eval_runtime": 417.8553, |
|
"eval_samples_per_second": 476.84, |
|
"eval_steps_per_second": 29.805, |
|
"step": 1544000 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 1.4486666666666665e-07, |
|
"loss": 2.7994, |
|
"step": 1552000 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"eval_loss": 2.6357638835906982, |
|
"eval_runtime": 417.2592, |
|
"eval_samples_per_second": 477.521, |
|
"eval_steps_per_second": 29.847, |
|
"step": 1552000 |
|
}, |
|
{ |
|
"epoch": 6.59, |
|
"eval_loss": 2.629568576812744, |
|
"eval_runtime": 416.6599, |
|
"eval_samples_per_second": 478.208, |
|
"eval_steps_per_second": 29.89, |
|
"step": 1560000 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 1.4213333333333334e-07, |
|
"loss": 2.7966, |
|
"step": 1568000 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"eval_loss": 2.636014938354492, |
|
"eval_runtime": 416.2491, |
|
"eval_samples_per_second": 478.68, |
|
"eval_steps_per_second": 29.92, |
|
"step": 1568000 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"eval_loss": 2.6326565742492676, |
|
"eval_runtime": 414.8509, |
|
"eval_samples_per_second": 480.293, |
|
"eval_steps_per_second": 30.02, |
|
"step": 1576000 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 1.3940000000000002e-07, |
|
"loss": 2.7883, |
|
"step": 1584000 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"eval_loss": 2.6365151405334473, |
|
"eval_runtime": 416.5987, |
|
"eval_samples_per_second": 478.278, |
|
"eval_steps_per_second": 29.894, |
|
"step": 1584000 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"eval_loss": 2.6257715225219727, |
|
"eval_runtime": 417.307, |
|
"eval_samples_per_second": 477.466, |
|
"eval_steps_per_second": 29.844, |
|
"step": 1592000 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 1.3666666666666665e-07, |
|
"loss": 2.7963, |
|
"step": 1600000 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"eval_loss": 2.6400821208953857, |
|
"eval_runtime": 416.6899, |
|
"eval_samples_per_second": 478.173, |
|
"eval_steps_per_second": 29.888, |
|
"step": 1600000 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"eval_loss": 2.6317903995513916, |
|
"eval_runtime": 424.5231, |
|
"eval_samples_per_second": 469.35, |
|
"eval_steps_per_second": 29.336, |
|
"step": 1608000 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"learning_rate": 1.3393333333333333e-07, |
|
"loss": 2.7923, |
|
"step": 1616000 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"eval_loss": 2.633047103881836, |
|
"eval_runtime": 425.6323, |
|
"eval_samples_per_second": 468.127, |
|
"eval_steps_per_second": 29.26, |
|
"step": 1616000 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"eval_loss": 2.637237071990967, |
|
"eval_runtime": 424.7976, |
|
"eval_samples_per_second": 469.047, |
|
"eval_steps_per_second": 29.317, |
|
"step": 1624000 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 1.312e-07, |
|
"loss": 2.789, |
|
"step": 1632000 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"eval_loss": 2.636323928833008, |
|
"eval_runtime": 425.1309, |
|
"eval_samples_per_second": 468.679, |
|
"eval_steps_per_second": 29.295, |
|
"step": 1632000 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"eval_loss": 2.6345624923706055, |
|
"eval_runtime": 421.3336, |
|
"eval_samples_per_second": 472.903, |
|
"eval_steps_per_second": 29.559, |
|
"step": 1640000 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 1.2846666666666667e-07, |
|
"loss": 2.7883, |
|
"step": 1648000 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"eval_loss": 2.629164934158325, |
|
"eval_runtime": 419.6709, |
|
"eval_samples_per_second": 474.777, |
|
"eval_steps_per_second": 29.676, |
|
"step": 1648000 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_loss": 2.6284499168395996, |
|
"eval_runtime": 421.1638, |
|
"eval_samples_per_second": 473.094, |
|
"eval_steps_per_second": 29.57, |
|
"step": 1656000 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"learning_rate": 1.2573333333333332e-07, |
|
"loss": 2.7965, |
|
"step": 1664000 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"eval_loss": 2.6407673358917236, |
|
"eval_runtime": 421.0368, |
|
"eval_samples_per_second": 473.237, |
|
"eval_steps_per_second": 29.579, |
|
"step": 1664000 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"eval_loss": 2.629608154296875, |
|
"eval_runtime": 418.0892, |
|
"eval_samples_per_second": 476.573, |
|
"eval_steps_per_second": 29.788, |
|
"step": 1672000 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"learning_rate": 1.23e-07, |
|
"loss": 2.7963, |
|
"step": 1680000 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"eval_loss": 2.633134603500366, |
|
"eval_runtime": 417.1134, |
|
"eval_samples_per_second": 477.688, |
|
"eval_steps_per_second": 29.858, |
|
"step": 1680000 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"eval_loss": 2.633925437927246, |
|
"eval_runtime": 416.8801, |
|
"eval_samples_per_second": 477.955, |
|
"eval_steps_per_second": 29.874, |
|
"step": 1688000 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"learning_rate": 1.2026666666666666e-07, |
|
"loss": 2.7911, |
|
"step": 1696000 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"eval_loss": 2.6206448078155518, |
|
"eval_runtime": 416.5239, |
|
"eval_samples_per_second": 478.364, |
|
"eval_steps_per_second": 29.9, |
|
"step": 1696000 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"eval_loss": 2.626826047897339, |
|
"eval_runtime": 423.5563, |
|
"eval_samples_per_second": 470.422, |
|
"eval_steps_per_second": 29.403, |
|
"step": 1704000 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"learning_rate": 1.1753333333333334e-07, |
|
"loss": 2.794, |
|
"step": 1712000 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"eval_loss": 2.6277658939361572, |
|
"eval_runtime": 424.0426, |
|
"eval_samples_per_second": 469.882, |
|
"eval_steps_per_second": 29.37, |
|
"step": 1712000 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"eval_loss": 2.624177932739258, |
|
"eval_runtime": 424.5606, |
|
"eval_samples_per_second": 469.309, |
|
"eval_steps_per_second": 29.334, |
|
"step": 1720000 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"learning_rate": 1.1480000000000001e-07, |
|
"loss": 2.7893, |
|
"step": 1728000 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"eval_loss": 2.6329119205474854, |
|
"eval_runtime": 423.4044, |
|
"eval_samples_per_second": 470.59, |
|
"eval_steps_per_second": 29.414, |
|
"step": 1728000 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"eval_loss": 2.634227991104126, |
|
"eval_runtime": 416.548, |
|
"eval_samples_per_second": 478.336, |
|
"eval_steps_per_second": 29.898, |
|
"step": 1736000 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"learning_rate": 1.1206666666666666e-07, |
|
"loss": 2.7935, |
|
"step": 1744000 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"eval_loss": 2.632899284362793, |
|
"eval_runtime": 416.0609, |
|
"eval_samples_per_second": 478.896, |
|
"eval_steps_per_second": 29.933, |
|
"step": 1744000 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"eval_loss": 2.6293511390686035, |
|
"eval_runtime": 417.5782, |
|
"eval_samples_per_second": 477.156, |
|
"eval_steps_per_second": 29.824, |
|
"step": 1752000 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"learning_rate": 1.0933333333333333e-07, |
|
"loss": 2.7936, |
|
"step": 1760000 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"eval_loss": 2.6300759315490723, |
|
"eval_runtime": 417.5993, |
|
"eval_samples_per_second": 477.132, |
|
"eval_steps_per_second": 29.823, |
|
"step": 1760000 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"eval_loss": 2.629504442214966, |
|
"eval_runtime": 417.0022, |
|
"eval_samples_per_second": 477.815, |
|
"eval_steps_per_second": 29.866, |
|
"step": 1768000 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"learning_rate": 1.066e-07, |
|
"loss": 2.7922, |
|
"step": 1776000 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"eval_loss": 2.6260571479797363, |
|
"eval_runtime": 415.7532, |
|
"eval_samples_per_second": 479.251, |
|
"eval_steps_per_second": 29.955, |
|
"step": 1776000 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"eval_loss": 2.6370482444763184, |
|
"eval_runtime": 417.3737, |
|
"eval_samples_per_second": 477.39, |
|
"eval_steps_per_second": 29.839, |
|
"step": 1784000 |
|
}, |
|
{ |
|
"epoch": 7.57, |
|
"learning_rate": 1.0386666666666667e-07, |
|
"loss": 2.7911, |
|
"step": 1792000 |
|
}, |
|
{ |
|
"epoch": 7.57, |
|
"eval_loss": 2.636384963989258, |
|
"eval_runtime": 417.7415, |
|
"eval_samples_per_second": 476.97, |
|
"eval_steps_per_second": 29.813, |
|
"step": 1792000 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"eval_loss": 2.623237371444702, |
|
"eval_runtime": 418.4252, |
|
"eval_samples_per_second": 476.19, |
|
"eval_steps_per_second": 29.764, |
|
"step": 1800000 |
|
}, |
|
{ |
|
"epoch": 7.64, |
|
"learning_rate": 1.0113333333333334e-07, |
|
"loss": 2.795, |
|
"step": 1808000 |
|
}, |
|
{ |
|
"epoch": 7.64, |
|
"eval_loss": 2.6200833320617676, |
|
"eval_runtime": 419.0681, |
|
"eval_samples_per_second": 475.46, |
|
"eval_steps_per_second": 29.718, |
|
"step": 1808000 |
|
}, |
|
{ |
|
"epoch": 7.68, |
|
"eval_loss": 2.6329450607299805, |
|
"eval_runtime": 420.2799, |
|
"eval_samples_per_second": 474.089, |
|
"eval_steps_per_second": 29.633, |
|
"step": 1816000 |
|
}, |
|
{ |
|
"epoch": 7.71, |
|
"learning_rate": 9.84e-08, |
|
"loss": 2.7898, |
|
"step": 1824000 |
|
}, |
|
{ |
|
"epoch": 7.71, |
|
"eval_loss": 2.6248958110809326, |
|
"eval_runtime": 419.6056, |
|
"eval_samples_per_second": 474.851, |
|
"eval_steps_per_second": 29.68, |
|
"step": 1824000 |
|
}, |
|
{ |
|
"epoch": 7.74, |
|
"eval_loss": 2.6249101161956787, |
|
"eval_runtime": 420.4509, |
|
"eval_samples_per_second": 473.896, |
|
"eval_steps_per_second": 29.621, |
|
"step": 1832000 |
|
}, |
|
{ |
|
"epoch": 7.78, |
|
"learning_rate": 9.566666666666666e-08, |
|
"loss": 2.7931, |
|
"step": 1840000 |
|
}, |
|
{ |
|
"epoch": 7.78, |
|
"eval_loss": 2.636091947555542, |
|
"eval_runtime": 419.4224, |
|
"eval_samples_per_second": 475.058, |
|
"eval_steps_per_second": 29.693, |
|
"step": 1840000 |
|
}, |
|
{ |
|
"epoch": 7.81, |
|
"eval_loss": null,
|
"eval_runtime": 419.9847, |
|
"eval_samples_per_second": 474.422, |
|
"eval_steps_per_second": 29.653, |
|
"step": 1848000 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"learning_rate": 9.293333333333333e-08, |
|
"loss": 2.7919, |
|
"step": 1856000 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"eval_loss": 2.627026319503784, |
|
"eval_runtime": 421.2325, |
|
"eval_samples_per_second": 473.017, |
|
"eval_steps_per_second": 29.566, |
|
"step": 1856000 |
|
}, |
|
{ |
|
"epoch": 7.88, |
|
"eval_loss": 2.636218309402466, |
|
"eval_runtime": 416.4114, |
|
"eval_samples_per_second": 478.493, |
|
"eval_steps_per_second": 29.908, |
|
"step": 1864000 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"learning_rate": 9.02e-08, |
|
"loss": 2.7833, |
|
"step": 1872000 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"eval_loss": 2.6277663707733154, |
|
"eval_runtime": 417.346, |
|
"eval_samples_per_second": 477.422, |
|
"eval_steps_per_second": 29.841, |
|
"step": 1872000 |
|
}, |
|
{ |
|
"epoch": 7.95, |
|
"eval_loss": 2.623185634613037, |
|
"eval_runtime": 417.1182, |
|
"eval_samples_per_second": 477.682, |
|
"eval_steps_per_second": 29.857, |
|
"step": 1880000 |
|
}, |
|
{ |
|
"epoch": 7.98, |
|
"learning_rate": 8.746666666666667e-08, |
|
"loss": 2.8067, |
|
"step": 1888000 |
|
}, |
|
{ |
|
"epoch": 7.98, |
|
"eval_loss": 2.626035451889038, |
|
"eval_runtime": 417.7897, |
|
"eval_samples_per_second": 476.915, |
|
"eval_steps_per_second": 29.809, |
|
"step": 1888000 |
|
}, |
|
{ |
|
"epoch": 8.01, |
|
"eval_loss": 2.626246929168701, |
|
"eval_runtime": 419.7511, |
|
"eval_samples_per_second": 474.686, |
|
"eval_steps_per_second": 29.67, |
|
"step": 1896000 |
|
}, |
|
{ |
|
"epoch": 8.05, |
|
"learning_rate": 8.473333333333334e-08, |
|
"loss": 2.7953, |
|
"step": 1904000 |
|
}, |
|
{ |
|
"epoch": 8.05, |
|
"eval_loss": 2.6271278858184814, |
|
"eval_runtime": 416.9341, |
|
"eval_samples_per_second": 477.893, |
|
"eval_steps_per_second": 29.87, |
|
"step": 1904000 |
|
}, |
|
{ |
|
"epoch": 8.08, |
|
"eval_loss": 2.6269619464874268, |
|
"eval_runtime": 417.5655, |
|
"eval_samples_per_second": 477.171, |
|
"eval_steps_per_second": 29.825, |
|
"step": 1912000 |
|
}, |
|
{ |
|
"epoch": 8.11, |
|
"learning_rate": 8.2e-08, |
|
"loss": 2.7953, |
|
"step": 1920000 |
|
}, |
|
{ |
|
"epoch": 8.11, |
|
"eval_loss": 2.6304750442504883, |
|
"eval_runtime": 418.9519, |
|
"eval_samples_per_second": 475.592, |
|
"eval_steps_per_second": 29.727, |
|
"step": 1920000 |
|
}, |
|
{ |
|
"epoch": 8.15, |
|
"eval_loss": 2.62542724609375, |
|
"eval_runtime": 417.0471, |
|
"eval_samples_per_second": 477.764, |
|
"eval_steps_per_second": 29.862, |
|
"step": 1928000 |
|
}, |
|
{ |
|
"epoch": 8.18, |
|
"learning_rate": 7.926666666666666e-08, |
|
"loss": 2.7881, |
|
"step": 1936000 |
|
}, |
|
{ |
|
"epoch": 8.18, |
|
"eval_loss": 2.6296772956848145, |
|
"eval_runtime": 416.8665, |
|
"eval_samples_per_second": 477.971, |
|
"eval_steps_per_second": 29.875, |
|
"step": 1936000 |
|
}, |
|
{ |
|
"epoch": 8.22, |
|
"eval_loss": 2.6271204948425293, |
|
"eval_runtime": 416.2392, |
|
"eval_samples_per_second": 478.691, |
|
"eval_steps_per_second": 29.92, |
|
"step": 1944000 |
|
}, |
|
{ |
|
"epoch": 8.25, |
|
"learning_rate": 7.653333333333333e-08, |
|
"loss": 2.7928, |
|
"step": 1952000 |
|
}, |
|
{ |
|
"epoch": 8.25, |
|
"eval_loss": 2.6253867149353027, |
|
"eval_runtime": 417.7441, |
|
"eval_samples_per_second": 476.967, |
|
"eval_steps_per_second": 29.813, |
|
"step": 1952000 |
|
}, |
|
{ |
|
"epoch": 8.28, |
|
"eval_loss": 2.6286251544952393, |
|
"eval_runtime": 418.17, |
|
"eval_samples_per_second": 476.481, |
|
"eval_steps_per_second": 29.782, |
|
"step": 1960000 |
|
}, |
|
{ |
|
"epoch": 8.32, |
|
"learning_rate": 7.38e-08, |
|
"loss": 2.8003, |
|
"step": 1968000 |
|
}, |
|
{ |
|
"epoch": 8.32, |
|
"eval_loss": 2.632986307144165, |
|
"eval_runtime": 416.6612, |
|
"eval_samples_per_second": 478.206, |
|
"eval_steps_per_second": 29.89, |
|
"step": 1968000 |
|
}, |
|
{ |
|
"epoch": 8.35, |
|
"eval_loss": 2.6285812854766846, |
|
"eval_runtime": 416.6788, |
|
"eval_samples_per_second": 478.186, |
|
"eval_steps_per_second": 29.889, |
|
"step": 1976000 |
|
}, |
|
{ |
|
"epoch": 8.39, |
|
"learning_rate": 7.106666666666667e-08, |
|
"loss": 2.7935, |
|
"step": 1984000 |
|
}, |
|
{ |
|
"epoch": 8.39, |
|
"eval_loss": 2.640784502029419, |
|
"eval_runtime": 417.7728, |
|
"eval_samples_per_second": 476.934, |
|
"eval_steps_per_second": 29.81, |
|
"step": 1984000 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"eval_loss": 2.627523422241211, |
|
"eval_runtime": 419.2453, |
|
"eval_samples_per_second": 475.259, |
|
"eval_steps_per_second": 29.706, |
|
"step": 1992000 |
|
}, |
|
{ |
|
"epoch": 8.45, |
|
"learning_rate": 6.833333333333332e-08, |
|
"loss": 2.7925, |
|
"step": 2000000 |
|
}, |
|
{ |
|
"epoch": 8.45, |
|
"eval_loss": 2.625904083251953, |
|
"eval_runtime": 418.4291, |
|
"eval_samples_per_second": 476.186, |
|
"eval_steps_per_second": 29.764, |
|
"step": 2000000 |
|
}, |
|
{ |
|
"epoch": 8.49, |
|
"eval_loss": 2.6302177906036377, |
|
"eval_runtime": 418.3648, |
|
"eval_samples_per_second": 476.259, |
|
"eval_steps_per_second": 29.768, |
|
"step": 2008000 |
|
}, |
|
{ |
|
"epoch": 8.52, |
|
"learning_rate": 6.56e-08, |
|
"loss": 2.7924, |
|
"step": 2016000 |
|
}, |
|
{ |
|
"epoch": 8.52, |
|
"eval_loss": 2.6320409774780273, |
|
"eval_runtime": 420.0509, |
|
"eval_samples_per_second": 474.347, |
|
"eval_steps_per_second": 29.649, |
|
"step": 2016000 |
|
}, |
|
{ |
|
"epoch": 8.55, |
|
"eval_loss": 2.629477024078369, |
|
"eval_runtime": 418.2782, |
|
"eval_samples_per_second": 476.358, |
|
"eval_steps_per_second": 29.774, |
|
"step": 2024000 |
|
}, |
|
{ |
|
"epoch": 8.59, |
|
"learning_rate": 6.286666666666666e-08, |
|
"loss": 2.799, |
|
"step": 2032000 |
|
}, |
|
{ |
|
"epoch": 8.59, |
|
"eval_loss": 2.6258628368377686, |
|
"eval_runtime": 417.2044, |
|
"eval_samples_per_second": 477.584, |
|
"eval_steps_per_second": 29.851, |
|
"step": 2032000 |
|
}, |
|
{ |
|
"epoch": 8.62, |
|
"eval_loss": 2.6245615482330322, |
|
"eval_runtime": 418.8359, |
|
"eval_samples_per_second": 475.723, |
|
"eval_steps_per_second": 29.735, |
|
"step": 2040000 |
|
}, |
|
{ |
|
"epoch": 8.66, |
|
"learning_rate": 6.013333333333333e-08, |
|
"loss": 2.7983, |
|
"step": 2048000 |
|
}, |
|
{ |
|
"epoch": 8.66, |
|
"eval_loss": 2.629530429840088, |
|
"eval_runtime": 418.9926, |
|
"eval_samples_per_second": 475.545, |
|
"eval_steps_per_second": 29.724, |
|
"step": 2048000 |
|
}, |
|
{ |
|
"epoch": 8.69, |
|
"eval_loss": 2.619439125061035, |
|
"eval_runtime": 421.9976, |
|
"eval_samples_per_second": 472.159, |
|
"eval_steps_per_second": 29.512, |
|
"step": 2056000 |
|
}, |
|
{ |
|
"epoch": 8.72, |
|
"learning_rate": 5.7400000000000004e-08, |
|
"loss": 2.7901, |
|
"step": 2064000 |
|
}, |
|
{ |
|
"epoch": 8.72, |
|
"eval_loss": 2.6258339881896973, |
|
"eval_runtime": 420.472, |
|
"eval_samples_per_second": 473.872, |
|
"eval_steps_per_second": 29.619, |
|
"step": 2064000 |
|
}, |
|
{ |
|
"epoch": 8.76, |
|
"eval_loss": 2.633378267288208, |
|
"eval_runtime": 421.3576, |
|
"eval_samples_per_second": 472.876, |
|
"eval_steps_per_second": 29.557, |
|
"step": 2072000 |
|
}, |
|
{ |
|
"epoch": 8.79, |
|
"learning_rate": 5.4666666666666666e-08, |
|
"loss": 2.7956, |
|
"step": 2080000 |
|
}, |
|
{ |
|
"epoch": 8.79, |
|
"eval_loss": 2.6361238956451416, |
|
"eval_runtime": 421.9485, |
|
"eval_samples_per_second": 472.214, |
|
"eval_steps_per_second": 29.515, |
|
"step": 2080000 |
|
}, |
|
{ |
|
"epoch": 8.82, |
|
"eval_loss": 2.617746591567993, |
|
"eval_runtime": 418.7323, |
|
"eval_samples_per_second": 475.841, |
|
"eval_steps_per_second": 29.742, |
|
"step": 2088000 |
|
}, |
|
{ |
|
"epoch": 8.86, |
|
"learning_rate": 5.1933333333333335e-08, |
|
"loss": 2.8008, |
|
"step": 2096000 |
|
}, |
|
{ |
|
"epoch": 8.86, |
|
"eval_loss": 2.632232189178467, |
|
"eval_runtime": 418.7251, |
|
"eval_samples_per_second": 475.849, |
|
"eval_steps_per_second": 29.743, |
|
"step": 2096000 |
|
}, |
|
{ |
|
"epoch": 8.89, |
|
"eval_loss": 2.6280999183654785, |
|
"eval_runtime": 417.4346, |
|
"eval_samples_per_second": 477.32, |
|
"eval_steps_per_second": 29.835, |
|
"step": 2104000 |
|
}, |
|
{ |
|
"epoch": 8.93, |
|
"learning_rate": 4.92e-08, |
|
"loss": 2.791, |
|
"step": 2112000 |
|
}, |
|
{ |
|
"epoch": 8.93, |
|
"eval_loss": 2.624938726425171, |
|
"eval_runtime": 418.5637, |
|
"eval_samples_per_second": 476.033, |
|
"eval_steps_per_second": 29.754, |
|
"step": 2112000 |
|
}, |
|
{ |
|
"epoch": 8.96, |
|
"eval_loss": 2.628361225128174, |
|
"eval_runtime": 419.8496, |
|
"eval_samples_per_second": 474.575, |
|
"eval_steps_per_second": 29.663, |
|
"step": 2120000 |
|
}, |
|
{ |
|
"epoch": 8.99, |
|
"learning_rate": 4.6466666666666666e-08, |
|
"loss": 2.7933, |
|
"step": 2128000 |
|
}, |
|
{ |
|
"epoch": 8.99, |
|
"eval_loss": 2.6270060539245605, |
|
"eval_runtime": 419.4985, |
|
"eval_samples_per_second": 474.972, |
|
"eval_steps_per_second": 29.688, |
|
"step": 2128000 |
|
}, |
|
{ |
|
"epoch": 9.03, |
|
"eval_loss": 2.624077081680298, |
|
"eval_runtime": 422.5644, |
|
"eval_samples_per_second": 471.526, |
|
"eval_steps_per_second": 29.472, |
|
"step": 2136000 |
|
}, |
|
{ |
|
"epoch": 9.06, |
|
"learning_rate": 4.3733333333333335e-08, |
|
"loss": 2.7825, |
|
"step": 2144000 |
|
}, |
|
{ |
|
"epoch": 9.06, |
|
"eval_loss": 2.6254072189331055, |
|
"eval_runtime": 421.1669, |
|
"eval_samples_per_second": 473.09, |
|
"eval_steps_per_second": 29.57, |
|
"step": 2144000 |
|
}, |
|
{ |
|
"epoch": 9.1, |
|
"eval_loss": 2.6282851696014404, |
|
"eval_runtime": 427.7816, |
|
"eval_samples_per_second": 465.775, |
|
"eval_steps_per_second": 29.113, |
|
"step": 2152000 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"learning_rate": 4.1e-08, |
|
"loss": 2.7854, |
|
"step": 2160000 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"eval_loss": 2.634307622909546, |
|
"eval_runtime": 424.5298, |
|
"eval_samples_per_second": 469.343, |
|
"eval_steps_per_second": 29.336, |
|
"step": 2160000 |
|
}, |
|
{ |
|
"epoch": 9.16, |
|
"eval_loss": 2.6208443641662598, |
|
"eval_runtime": 422.4964, |
|
"eval_samples_per_second": 471.602, |
|
"eval_steps_per_second": 29.477, |
|
"step": 2168000 |
|
}, |
|
{ |
|
"epoch": 9.2, |
|
"learning_rate": 3.8266666666666665e-08, |
|
"loss": 2.7949, |
|
"step": 2176000 |
|
}, |
|
{ |
|
"epoch": 9.2, |
|
"eval_loss": 2.62925386428833, |
|
"eval_runtime": 420.757, |
|
"eval_samples_per_second": 473.551, |
|
"eval_steps_per_second": 29.599, |
|
"step": 2176000 |
|
}, |
|
{ |
|
"epoch": 9.23, |
|
"eval_loss": 2.6265780925750732, |
|
"eval_runtime": 420.6962, |
|
"eval_samples_per_second": 473.62, |
|
"eval_steps_per_second": 29.603, |
|
"step": 2184000 |
|
}, |
|
{ |
|
"epoch": 9.26, |
|
"learning_rate": 3.5533333333333334e-08, |
|
"loss": 2.7938, |
|
"step": 2192000 |
|
}, |
|
{ |
|
"epoch": 9.26, |
|
"eval_loss": 2.6270058155059814, |
|
"eval_runtime": 420.3162, |
|
"eval_samples_per_second": 474.048, |
|
"eval_steps_per_second": 29.63, |
|
"step": 2192000 |
|
}, |
|
{ |
|
"epoch": 9.3, |
|
"eval_loss": 2.6237611770629883, |
|
"eval_runtime": 418.1737, |
|
"eval_samples_per_second": 476.477, |
|
"eval_steps_per_second": 29.782, |
|
"step": 2200000 |
|
}, |
|
{ |
|
"epoch": 9.33, |
|
"learning_rate": 3.28e-08, |
|
"loss": 2.7905, |
|
"step": 2208000 |
|
}, |
|
{ |
|
"epoch": 9.33, |
|
"eval_loss": 2.628171920776367, |
|
"eval_runtime": 420.3324, |
|
"eval_samples_per_second": 474.03, |
|
"eval_steps_per_second": 29.629, |
|
"step": 2208000 |
|
}, |
|
{ |
|
"epoch": 9.37, |
|
"eval_loss": 2.6245853900909424, |
|
"eval_runtime": 421.5905, |
|
"eval_samples_per_second": 472.615, |
|
"eval_steps_per_second": 29.541, |
|
"step": 2216000 |
|
}, |
|
{ |
|
"epoch": 9.4, |
|
"learning_rate": 3.0066666666666665e-08, |
|
"loss": 2.8004, |
|
"step": 2224000 |
|
}, |
|
{ |
|
"epoch": 9.4, |
|
"eval_loss": 2.6274161338806152, |
|
"eval_runtime": 419.7336, |
|
"eval_samples_per_second": 474.706, |
|
"eval_steps_per_second": 29.671, |
|
"step": 2224000 |
|
}, |
|
{ |
|
"epoch": 9.43, |
|
"eval_loss": 2.6251978874206543, |
|
"eval_runtime": 418.2128, |
|
"eval_samples_per_second": 476.432, |
|
"eval_steps_per_second": 29.779, |
|
"step": 2232000 |
|
}, |
|
{ |
|
"epoch": 9.47, |
|
"learning_rate": 2.7333333333333333e-08, |
|
"loss": 2.7921, |
|
"step": 2240000 |
|
}, |
|
{ |
|
"epoch": 9.47, |
|
"eval_loss": 2.6343250274658203, |
|
"eval_runtime": 419.0507, |
|
"eval_samples_per_second": 475.479, |
|
"eval_steps_per_second": 29.72, |
|
"step": 2240000 |
|
}, |
|
{ |
|
"epoch": 9.5, |
|
"eval_loss": 2.632768154144287, |
|
"eval_runtime": 424.1088, |
|
"eval_samples_per_second": 469.809, |
|
"eval_steps_per_second": 29.365, |
|
"step": 2248000 |
|
}, |
|
{ |
|
"epoch": 9.53, |
|
"learning_rate": 2.46e-08, |
|
"loss": 2.7964, |
|
"step": 2256000 |
|
}, |
|
{ |
|
"epoch": 9.53, |
|
"eval_loss": 2.6206092834472656, |
|
"eval_runtime": 423.0049, |
|
"eval_samples_per_second": 471.035, |
|
"eval_steps_per_second": 29.442, |
|
"step": 2256000 |
|
}, |
|
{ |
|
"epoch": 9.57, |
|
"eval_loss": 2.6235222816467285, |
|
"eval_runtime": 421.2675, |
|
"eval_samples_per_second": 472.977, |
|
"eval_steps_per_second": 29.563, |
|
"step": 2264000 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"learning_rate": 2.1866666666666667e-08, |
|
"loss": 2.7954, |
|
"step": 2272000 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"eval_loss": 2.6287684440612793, |
|
"eval_runtime": 421.0647, |
|
"eval_samples_per_second": 473.205, |
|
"eval_steps_per_second": 29.577, |
|
"step": 2272000 |
|
}, |
|
{ |
|
"epoch": 9.64, |
|
"eval_loss": 2.6203510761260986, |
|
"eval_runtime": 420.6158, |
|
"eval_samples_per_second": 473.71, |
|
"eval_steps_per_second": 29.609, |
|
"step": 2280000 |
|
}, |
|
{ |
|
"epoch": 9.67, |
|
"learning_rate": 1.9133333333333333e-08, |
|
"loss": 2.7902, |
|
"step": 2288000 |
|
}, |
|
{ |
|
"epoch": 9.67, |
|
"eval_loss": 2.6231720447540283, |
|
"eval_runtime": 419.7124, |
|
"eval_samples_per_second": 474.73, |
|
"eval_steps_per_second": 29.673, |
|
"step": 2288000 |
|
}, |
|
{ |
|
"epoch": 9.7, |
|
"eval_loss": 2.6238837242126465, |
|
"eval_runtime": 419.2797, |
|
"eval_samples_per_second": 475.22, |
|
"eval_steps_per_second": 29.703, |
|
"step": 2296000 |
|
}, |
|
{ |
|
"epoch": 9.74, |
|
"learning_rate": 1.64e-08, |
|
"loss": 2.8046, |
|
"step": 2304000 |
|
}, |
|
{ |
|
"epoch": 9.74, |
|
"eval_loss": 2.6240670680999756, |
|
"eval_runtime": 419.389, |
|
"eval_samples_per_second": 475.096, |
|
"eval_steps_per_second": 29.696, |
|
"step": 2304000 |
|
}, |
|
{ |
|
"epoch": 9.77, |
|
"eval_loss": 2.6258645057678223, |
|
"eval_runtime": 420.6507, |
|
"eval_samples_per_second": 473.671, |
|
"eval_steps_per_second": 29.607, |
|
"step": 2312000 |
|
}, |
|
{ |
|
"epoch": 9.81, |
|
"learning_rate": 1.3666666666666667e-08, |
|
"loss": 2.793, |
|
"step": 2320000 |
|
}, |
|
{ |
|
"epoch": 9.81, |
|
"eval_loss": 2.6274592876434326, |
|
"eval_runtime": 419.5581, |
|
"eval_samples_per_second": 474.904, |
|
"eval_steps_per_second": 29.684, |
|
"step": 2320000 |
|
}, |
|
{ |
|
"epoch": 9.84, |
|
"eval_loss": 2.6264114379882812, |
|
"eval_runtime": 418.2082, |
|
"eval_samples_per_second": 476.437, |
|
"eval_steps_per_second": 29.779, |
|
"step": 2328000 |
|
}, |
|
{ |
|
"epoch": 9.87, |
|
"learning_rate": 1.0933333333333334e-08, |
|
"loss": 2.7893, |
|
"step": 2336000 |
|
}, |
|
{ |
|
"epoch": 9.87, |
|
"eval_loss": 2.633244037628174, |
|
"eval_runtime": 419.1985, |
|
"eval_samples_per_second": 475.312, |
|
"eval_steps_per_second": 29.709, |
|
"step": 2336000 |
|
}, |
|
{ |
|
"epoch": 9.91, |
|
"eval_loss": 2.621447801589966, |
|
"eval_runtime": 423.5885, |
|
"eval_samples_per_second": 470.386, |
|
"eval_steps_per_second": 29.401, |
|
"step": 2344000 |
|
}, |
|
{ |
|
"epoch": 9.94, |
|
"learning_rate": 8.2e-09, |
|
"loss": 2.7898, |
|
"step": 2352000 |
|
}, |
|
{ |
|
"epoch": 9.94, |
|
"eval_loss": 2.631804943084717, |
|
"eval_runtime": 424.5578, |
|
"eval_samples_per_second": 469.312, |
|
"eval_steps_per_second": 29.334, |
|
"step": 2352000 |
|
}, |
|
{ |
|
"epoch": 9.97, |
|
"eval_loss": 2.623945474624634, |
|
"eval_runtime": 421.6182, |
|
"eval_samples_per_second": 472.584, |
|
"eval_steps_per_second": 29.539, |
|
"step": 2360000 |
|
}, |
|
{ |
|
"epoch": 10.01, |
|
"learning_rate": 5.466666666666667e-09, |
|
"loss": 2.7906, |
|
"step": 2368000 |
|
}, |
|
{ |
|
"epoch": 10.01, |
|
"eval_loss": 2.6214942932128906, |
|
"eval_runtime": 421.2919, |
|
"eval_samples_per_second": 472.95, |
|
"eval_steps_per_second": 29.561, |
|
"step": 2368000 |
|
}, |
|
{ |
|
"epoch": 10.04, |
|
"eval_loss": 2.633603096008301, |
|
"eval_runtime": 422.9875, |
|
"eval_samples_per_second": 471.054, |
|
"eval_steps_per_second": 29.443, |
|
"step": 2376000 |
|
}, |
|
{ |
|
"epoch": 10.08, |
|
"learning_rate": 2.7333333333333334e-09, |
|
"loss": 2.7942, |
|
"step": 2384000 |
|
}, |
|
{ |
|
"epoch": 10.08, |
|
"eval_loss": 2.6217610836029053, |
|
"eval_runtime": 422.4965, |
|
"eval_samples_per_second": 471.602, |
|
"eval_steps_per_second": 29.477, |
|
"step": 2384000 |
|
}, |
|
{ |
|
"epoch": 10.11, |
|
"eval_loss": 2.6299171447753906, |
|
"eval_runtime": 419.6936, |
|
"eval_samples_per_second": 474.751, |
|
"eval_steps_per_second": 29.674, |
|
"step": 2392000 |
|
}, |
|
{ |
|
"epoch": 10.14, |
|
"learning_rate": 0.0, |
|
"loss": 2.7997, |
|
"step": 2400000 |
|
}, |
|
{ |
|
"epoch": 10.14, |
|
"eval_loss": 2.6303224563598633, |
|
"eval_runtime": 419.2829, |
|
"eval_samples_per_second": 475.216, |
|
"eval_steps_per_second": 29.703, |
|
"step": 2400000 |
|
}, |
|
{ |
|
"epoch": 10.14, |
|
"step": 2400000, |
|
"total_flos": 7.667884600087345e+17, |
|
"train_loss": 2.8006172998046877, |
|
"train_runtime": 396247.3581, |
|
"train_samples_per_second": 96.909, |
|
"train_steps_per_second": 6.057 |
|
} |
|
], |
|
"logging_steps": 16000, |
|
"max_steps": 2400000, |
|
"num_train_epochs": 11, |
|
"save_steps": 32000, |
|
"total_flos": 7.667884600087345e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|