{
  "best_metric": 1.0,
  "best_model_checkpoint": "./models/checkpoint-602",
  "epoch": 98.0,
  "global_step": 58996,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "learning_rate": 5.99e-05,
      "loss": 9.7183,
      "step": 602
    },
    {
      "epoch": 1.0,
      "eval_loss": 3.9950010776519775,
      "eval_runtime": 377.3539,
      "eval_samples_per_second": 12.24,
      "eval_steps_per_second": 0.154,
      "eval_wer": 1.0,
      "step": 602
    },
    {
      "epoch": 2.0,
      "learning_rate": 9.966047297297298e-05,
      "loss": 2.4521,
      "step": 1204
    },
    {
      "epoch": 2.0,
      "eval_loss": 0.7947585582733154,
      "eval_runtime": 335.0691,
      "eval_samples_per_second": 13.785,
      "eval_steps_per_second": 0.173,
      "eval_wer": 0.7436259622854722,
      "step": 1204
    },
    {
      "epoch": 3.0,
      "learning_rate": 9.864358108108108e-05,
      "loss": 0.7245,
      "step": 1806
    },
    {
      "epoch": 3.0,
      "eval_loss": 0.5130816698074341,
      "eval_runtime": 367.2509,
      "eval_samples_per_second": 12.577,
      "eval_steps_per_second": 0.158,
      "eval_wer": 0.6208418673635143,
      "step": 1806
    },
    {
      "epoch": 4.0,
      "learning_rate": 9.762668918918919e-05,
      "loss": 0.5488,
      "step": 2408
    },
    {
      "epoch": 4.0,
      "eval_loss": 0.4323376417160034,
      "eval_runtime": 384.1184,
      "eval_samples_per_second": 12.025,
      "eval_steps_per_second": 0.151,
      "eval_wer": 0.5652941591920333,
      "step": 2408
    },
    {
      "epoch": 5.0,
      "learning_rate": 9.66097972972973e-05,
      "loss": 0.4613,
      "step": 3010
    },
    {
      "epoch": 5.0,
      "eval_loss": 0.3905767500400543,
      "eval_runtime": 369.1827,
      "eval_samples_per_second": 12.511,
      "eval_steps_per_second": 0.157,
      "eval_wer": 0.5313934599901123,
      "step": 3010
    },
    {
      "epoch": 6.0,
      "learning_rate": 9.559290540540541e-05,
      "loss": 0.4138,
      "step": 3612
    },
    {
      "epoch": 6.0,
      "eval_loss": 0.36640074849128723,
      "eval_runtime": 327.6142,
      "eval_samples_per_second": 14.099,
      "eval_steps_per_second": 0.177,
      "eval_wer": 0.5138074722791157,
      "step": 3612
    },
    {
      "epoch": 7.0,
      "learning_rate": 9.457601351351352e-05,
      "loss": 0.3768,
      "step": 4214
    },
    {
      "epoch": 7.0,
      "eval_loss": 0.34569260478019714,
      "eval_runtime": 320.036,
      "eval_samples_per_second": 14.433,
      "eval_steps_per_second": 0.181,
      "eval_wer": 0.49265484850625046,
      "step": 4214
    },
    {
      "epoch": 8.0,
      "learning_rate": 9.355912162162163e-05,
      "loss": 0.3493,
      "step": 4816
    },
    {
      "epoch": 8.0,
      "eval_loss": 0.3470996916294098,
      "eval_runtime": 322.8379,
      "eval_samples_per_second": 14.307,
      "eval_steps_per_second": 0.18,
      "eval_wer": 0.48280245780069214,
      "step": 4816
    },
    {
      "epoch": 9.0,
      "learning_rate": 9.254222972972973e-05,
      "loss": 0.3278,
      "step": 5418
    },
    {
      "epoch": 9.0,
      "eval_loss": 0.329658180475235,
      "eval_runtime": 351.53,
      "eval_samples_per_second": 13.14,
      "eval_steps_per_second": 0.165,
      "eval_wer": 0.4710431527650258,
      "step": 5418
    },
    {
      "epoch": 10.0,
      "learning_rate": 9.152533783783784e-05,
      "loss": 0.3042,
      "step": 6020
    },
    {
      "epoch": 10.0,
      "eval_loss": 0.3340738117694855,
      "eval_runtime": 307.6844,
      "eval_samples_per_second": 15.012,
      "eval_steps_per_second": 0.189,
      "eval_wer": 0.4614732678861502,
      "step": 6020
    },
    {
      "epoch": 11.0,
      "learning_rate": 9.050844594594595e-05,
      "loss": 0.2902,
      "step": 6622
    },
    {
      "epoch": 11.0,
      "eval_loss": 0.31241220235824585,
      "eval_runtime": 313.7253,
      "eval_samples_per_second": 14.723,
      "eval_steps_per_second": 0.185,
      "eval_wer": 0.4491842644254538,
      "step": 6622
    },
    {
      "epoch": 12.0,
      "learning_rate": 8.949155405405406e-05,
      "loss": 0.2748,
      "step": 7224
    },
    {
      "epoch": 12.0,
      "eval_loss": 0.3046228885650635,
      "eval_runtime": 310.6627,
      "eval_samples_per_second": 14.868,
      "eval_steps_per_second": 0.187,
      "eval_wer": 0.4424747510417402,
      "step": 7224
    },
    {
      "epoch": 13.0,
      "learning_rate": 8.847466216216217e-05,
      "loss": 0.263,
      "step": 7826
    },
    {
      "epoch": 13.0,
      "eval_loss": 0.3139520585536957,
      "eval_runtime": 306.886,
      "eval_samples_per_second": 15.051,
      "eval_steps_per_second": 0.189,
      "eval_wer": 0.4403206441132848,
      "step": 7826
    },
    {
      "epoch": 14.0,
      "learning_rate": 8.745945945945946e-05,
      "loss": 0.2506,
      "step": 8428
    },
    {
      "epoch": 14.0,
      "eval_loss": 0.3264484107494354,
      "eval_runtime": 314.9752,
      "eval_samples_per_second": 14.665,
      "eval_steps_per_second": 0.184,
      "eval_wer": 0.4372837064764461,
      "step": 8428
    },
    {
      "epoch": 15.0,
      "learning_rate": 8.644256756756757e-05,
      "loss": 0.2395,
      "step": 9030
    },
    {
      "epoch": 15.0,
      "eval_loss": 0.3298631012439728,
      "eval_runtime": 313.3642,
      "eval_samples_per_second": 14.74,
      "eval_steps_per_second": 0.185,
      "eval_wer": 0.4310332650610919,
      "step": 9030
    },
    {
      "epoch": 16.0,
      "learning_rate": 8.542567567567568e-05,
      "loss": 0.2289,
      "step": 9632
    },
    {
      "epoch": 16.0,
      "eval_loss": 0.32563555240631104,
      "eval_runtime": 316.6154,
      "eval_samples_per_second": 14.589,
      "eval_steps_per_second": 0.183,
      "eval_wer": 0.4247828236457377,
      "step": 9632
    },
    {
      "epoch": 17.0,
      "learning_rate": 8.440878378378379e-05,
      "loss": 0.2229,
      "step": 10234
    },
    {
      "epoch": 17.0,
      "eval_loss": 0.30847427248954773,
      "eval_runtime": 314.5985,
      "eval_samples_per_second": 14.682,
      "eval_steps_per_second": 0.184,
      "eval_wer": 0.4183205028603715,
      "step": 10234
    },
    {
      "epoch": 18.0,
      "learning_rate": 8.339358108108109e-05,
      "loss": 0.2123,
      "step": 10836
    },
    {
      "epoch": 18.0,
      "eval_loss": 0.32277214527130127,
      "eval_runtime": 319.0938,
      "eval_samples_per_second": 14.475,
      "eval_steps_per_second": 0.182,
      "eval_wer": 0.41549544459354476,
      "step": 10836
    },
    {
      "epoch": 19.0,
      "learning_rate": 8.23766891891892e-05,
      "loss": 0.2059,
      "step": 11438
    },
    {
      "epoch": 19.0,
      "eval_loss": 0.32871213555336,
      "eval_runtime": 322.1095,
      "eval_samples_per_second": 14.34,
      "eval_steps_per_second": 0.18,
      "eval_wer": 0.4104103397132566,
      "step": 11438
    },
    {
      "epoch": 20.0,
      "learning_rate": 8.135979729729731e-05,
      "loss": 0.2011,
      "step": 12040
    },
    {
      "epoch": 20.0,
      "eval_loss": 0.3290630877017975,
      "eval_runtime": 327.3838,
      "eval_samples_per_second": 14.109,
      "eval_steps_per_second": 0.177,
      "eval_wer": 0.4098100148315559,
      "step": 12040
    },
    {
      "epoch": 21.0,
      "learning_rate": 8.03429054054054e-05,
      "loss": 0.1948,
      "step": 12642
    },
    {
      "epoch": 21.0,
      "eval_loss": 0.3254837393760681,
      "eval_runtime": 343.6765,
      "eval_samples_per_second": 13.44,
      "eval_steps_per_second": 0.169,
      "eval_wer": 0.4006638886927043,
      "step": 12642
    },
    {
      "epoch": 22.0,
      "learning_rate": 7.932770270270272e-05,
      "loss": 0.1884,
      "step": 13244
    },
    {
      "epoch": 22.0,
      "eval_loss": 0.3118044137954712,
      "eval_runtime": 335.2687,
      "eval_samples_per_second": 13.777,
      "eval_steps_per_second": 0.173,
      "eval_wer": 0.39858040822091956,
      "step": 13244
    },
    {
      "epoch": 23.0,
      "learning_rate": 7.831081081081081e-05,
      "loss": 0.1806,
      "step": 13846
    },
    {
      "epoch": 23.0,
      "eval_loss": 0.3262838125228882,
      "eval_runtime": 390.3862,
      "eval_samples_per_second": 11.832,
      "eval_steps_per_second": 0.149,
      "eval_wer": 0.3974150716858535,
      "step": 13846
    },
    {
      "epoch": 24.0,
      "learning_rate": 7.729391891891892e-05,
      "loss": 0.1782,
      "step": 14448
    },
    {
      "epoch": 24.0,
      "eval_loss": 0.3207845687866211,
      "eval_runtime": 360.7865,
      "eval_samples_per_second": 12.803,
      "eval_steps_per_second": 0.161,
      "eval_wer": 0.39123525672717,
      "step": 14448
    },
    {
      "epoch": 25.0,
      "learning_rate": 7.627702702702702e-05,
      "loss": 0.1729,
      "step": 15050
    },
    {
      "epoch": 25.0,
      "eval_loss": 0.3174630105495453,
      "eval_runtime": 356.0372,
      "eval_samples_per_second": 12.973,
      "eval_steps_per_second": 0.163,
      "eval_wer": 0.3948372060173741,
      "step": 15050
    },
    {
      "epoch": 26.0,
      "learning_rate": 7.526013513513513e-05,
      "loss": 0.1687,
      "step": 15652
    },
    {
      "epoch": 26.0,
      "eval_loss": 0.3447326123714447,
      "eval_runtime": 369.502,
      "eval_samples_per_second": 12.501,
      "eval_steps_per_second": 0.157,
      "eval_wer": 0.38544388728017515,
      "step": 15652
    },
    {
      "epoch": 27.0,
      "learning_rate": 7.424324324324324e-05,
      "loss": 0.167,
      "step": 16254
    },
    {
      "epoch": 27.0,
      "eval_loss": 0.3525393605232239,
      "eval_runtime": 355.9444,
      "eval_samples_per_second": 12.977,
      "eval_steps_per_second": 0.163,
      "eval_wer": 0.3901052334204393,
      "step": 16254
    },
    {
      "epoch": 28.0,
      "learning_rate": 7.322635135135135e-05,
      "loss": 0.1619,
      "step": 16856
    },
    {
      "epoch": 28.0,
      "eval_loss": 0.3377859592437744,
      "eval_runtime": 331.4224,
      "eval_samples_per_second": 13.937,
      "eval_steps_per_second": 0.175,
      "eval_wer": 0.3853732608235045,
      "step": 16856
    },
    {
      "epoch": 29.0,
      "learning_rate": 7.220945945945946e-05,
      "loss": 0.1549,
      "step": 17458
    },
    {
      "epoch": 29.0,
      "eval_loss": 0.35701897740364075,
      "eval_runtime": 327.8021,
      "eval_samples_per_second": 14.091,
      "eval_steps_per_second": 0.177,
      "eval_wer": 0.3870682957836005,
      "step": 17458
    },
    {
      "epoch": 30.0,
      "learning_rate": 7.119425675675676e-05,
      "loss": 0.1543,
      "step": 18060
    },
    {
      "epoch": 30.0,
      "eval_loss": 0.3298207223415375,
      "eval_runtime": 329.7188,
      "eval_samples_per_second": 14.009,
      "eval_steps_per_second": 0.176,
      "eval_wer": 0.38085316759658167,
      "step": 18060
    },
    {
      "epoch": 31.0,
      "learning_rate": 7.017736486486487e-05,
      "loss": 0.15,
      "step": 18662
    },
    {
      "epoch": 31.0,
      "eval_loss": 0.34915244579315186,
      "eval_runtime": 362.9462,
      "eval_samples_per_second": 12.726,
      "eval_steps_per_second": 0.16,
      "eval_wer": 0.3779221696447489,
      "step": 18662
    },
    {
      "epoch": 32.0,
      "learning_rate": 6.916047297297298e-05,
      "loss": 0.1459,
      "step": 19264
    },
    {
      "epoch": 32.0,
      "eval_loss": 0.3343390226364136,
      "eval_runtime": 363.1934,
      "eval_samples_per_second": 12.718,
      "eval_steps_per_second": 0.16,
      "eval_wer": 0.37530899074793417,
      "step": 19264
    },
    {
      "epoch": 33.0,
      "learning_rate": 6.814527027027028e-05,
      "loss": 0.1439,
      "step": 19866
    },
    {
      "epoch": 33.0,
      "eval_loss": 0.3707464039325714,
      "eval_runtime": 319.1722,
      "eval_samples_per_second": 14.472,
      "eval_steps_per_second": 0.182,
      "eval_wer": 0.3783106151564376,
      "step": 19866
    },
    {
      "epoch": 34.0,
      "learning_rate": 6.713006756756756e-05,
      "loss": 0.1386,
      "step": 20468
    },
    {
      "epoch": 34.0,
      "eval_loss": 0.3442770838737488,
      "eval_runtime": 326.9396,
      "eval_samples_per_second": 14.128,
      "eval_steps_per_second": 0.177,
      "eval_wer": 0.3738964616145208,
      "step": 20468
    },
    {
      "epoch": 35.0,
      "learning_rate": 6.611317567567568e-05,
      "loss": 0.1374,
      "step": 21070
    },
    {
      "epoch": 35.0,
      "eval_loss": 0.36009541153907776,
      "eval_runtime": 311.0687,
      "eval_samples_per_second": 14.849,
      "eval_steps_per_second": 0.186,
      "eval_wer": 0.37350801610283213,
      "step": 21070
    },
    {
      "epoch": 36.0,
      "learning_rate": 6.509628378378379e-05,
      "loss": 0.1334,
      "step": 21672
    },
    {
      "epoch": 36.0,
      "eval_loss": 0.37101128697395325,
      "eval_runtime": 289.3281,
      "eval_samples_per_second": 15.965,
      "eval_steps_per_second": 0.2,
      "eval_wer": 0.37138922240271205,
      "step": 21672
    },
    {
      "epoch": 37.0,
      "learning_rate": 6.40793918918919e-05,
      "loss": 0.13,
      "step": 22274
    },
    {
      "epoch": 37.0,
      "eval_loss": 0.3545927107334137,
      "eval_runtime": 291.5095,
      "eval_samples_per_second": 15.845,
      "eval_steps_per_second": 0.199,
      "eval_wer": 0.3684935376792146,
      "step": 22274
    },
    {
      "epoch": 38.0,
      "learning_rate": 6.306250000000001e-05,
      "loss": 0.1266,
      "step": 22876
    },
    {
      "epoch": 38.0,
      "eval_loss": 0.37049075961112976,
      "eval_runtime": 328.1134,
      "eval_samples_per_second": 14.077,
      "eval_steps_per_second": 0.177,
      "eval_wer": 0.367787273112508,
      "step": 22876
    },
    {
      "epoch": 39.0,
      "learning_rate": 6.20456081081081e-05,
      "loss": 0.1253,
      "step": 23478
    },
    {
      "epoch": 39.0,
      "eval_loss": 0.3741365373134613,
      "eval_runtime": 476.2923,
      "eval_samples_per_second": 9.698,
      "eval_steps_per_second": 0.122,
      "eval_wer": 0.37011794618264,
      "step": 23478
    },
    {
      "epoch": 40.0,
      "learning_rate": 6.103040540540541e-05,
      "loss": 0.1221,
      "step": 24080
    },
    {
      "epoch": 40.0,
      "eval_loss": 0.3850318491458893,
      "eval_runtime": 331.7813,
      "eval_samples_per_second": 13.922,
      "eval_steps_per_second": 0.175,
      "eval_wer": 0.37220142665442474,
      "step": 24080
    },
    {
      "epoch": 41.0,
      "learning_rate": 6.001351351351352e-05,
      "loss": 0.1205,
      "step": 24682
    },
    {
      "epoch": 41.0,
      "eval_loss": 0.3865145146846771,
      "eval_runtime": 328.7451,
      "eval_samples_per_second": 14.05,
      "eval_steps_per_second": 0.176,
      "eval_wer": 0.3651034677590225,
      "step": 24682
    },
    {
      "epoch": 42.0,
      "learning_rate": 5.899662162162163e-05,
      "loss": 0.1179,
      "step": 25284
    },
    {
      "epoch": 42.0,
      "eval_loss": 0.3818467855453491,
      "eval_runtime": 325.7829,
      "eval_samples_per_second": 14.178,
      "eval_steps_per_second": 0.178,
      "eval_wer": 0.36644537043576525,
      "step": 25284
    },
    {
      "epoch": 43.0,
      "learning_rate": 5.7981418918918915e-05,
      "loss": 0.1185,
      "step": 25886
    },
    {
      "epoch": 43.0,
      "eval_loss": 0.3811143636703491,
      "eval_runtime": 326.5779,
      "eval_samples_per_second": 14.144,
      "eval_steps_per_second": 0.178,
      "eval_wer": 0.36644537043576525,
      "step": 25886
    },
    {
      "epoch": 44.0,
      "learning_rate": 5.6964527027027026e-05,
      "loss": 0.114,
      "step": 26488
    },
    {
      "epoch": 44.0,
      "eval_loss": 0.3834444284439087,
      "eval_runtime": 335.6248,
      "eval_samples_per_second": 13.762,
      "eval_steps_per_second": 0.173,
      "eval_wer": 0.3639028179956212,
      "step": 26488
    },
    {
      "epoch": 45.0,
      "learning_rate": 5.594763513513514e-05,
      "loss": 0.1147,
      "step": 27090
    },
    {
      "epoch": 45.0,
      "eval_loss": 0.41083571314811707,
      "eval_runtime": 333.8024,
      "eval_samples_per_second": 13.838,
      "eval_steps_per_second": 0.174,
      "eval_wer": 0.3716364150010594,
      "step": 27090
    },
    {
      "epoch": 46.0,
      "learning_rate": 5.493074324324324e-05,
      "loss": 0.1115,
      "step": 27692
    },
    {
      "epoch": 46.0,
      "eval_loss": 0.39661291241645813,
      "eval_runtime": 339.3131,
      "eval_samples_per_second": 13.613,
      "eval_steps_per_second": 0.171,
      "eval_wer": 0.36227840949219575,
      "step": 27692
    },
    {
      "epoch": 47.0,
      "learning_rate": 5.3915540540540545e-05,
      "loss": 0.109,
      "step": 28294
    },
    {
      "epoch": 47.0,
      "eval_loss": 0.38050925731658936,
      "eval_runtime": 397.3812,
      "eval_samples_per_second": 11.624,
      "eval_steps_per_second": 0.146,
      "eval_wer": 0.358817713115333,
      "step": 28294
    },
    {
      "epoch": 48.0,
      "learning_rate": 5.289864864864865e-05,
      "loss": 0.1072,
      "step": 28896
    },
    {
      "epoch": 48.0,
      "eval_loss": 0.3901868164539337,
      "eval_runtime": 304.2685,
      "eval_samples_per_second": 15.181,
      "eval_steps_per_second": 0.191,
      "eval_wer": 0.35631047390352427,
      "step": 28896
    },
    {
      "epoch": 49.0,
      "learning_rate": 5.188175675675676e-05,
      "loss": 0.1066,
      "step": 29498
    },
    {
      "epoch": 49.0,
      "eval_loss": 0.41520482301712036,
      "eval_runtime": 344.9108,
      "eval_samples_per_second": 13.392,
      "eval_steps_per_second": 0.168,
      "eval_wer": 0.36079525390211176,
      "step": 29498
    },
    {
      "epoch": 50.0,
      "learning_rate": 5.086486486486487e-05,
      "loss": 0.103,
      "step": 30100
    },
    {
      "epoch": 50.0,
      "eval_loss": 0.3830413520336151,
      "eval_runtime": 326.4997,
      "eval_samples_per_second": 14.147,
      "eval_steps_per_second": 0.178,
      "eval_wer": 0.3593120983120277,
      "step": 30100
    },
    {
      "epoch": 51.0,
      "learning_rate": 4.9847972972972975e-05,
      "loss": 0.1014,
      "step": 30702
    },
    {
      "epoch": 51.0,
      "eval_loss": 0.40889114141464233,
      "eval_runtime": 440.9092,
      "eval_samples_per_second": 10.476,
      "eval_steps_per_second": 0.132,
      "eval_wer": 0.35578077547849424,
      "step": 30702
    },
    {
      "epoch": 52.0,
      "learning_rate": 4.883108108108108e-05,
      "loss": 0.1012,
      "step": 31304
    },
    {
      "epoch": 52.0,
      "eval_loss": 0.3804776668548584,
      "eval_runtime": 399.7783,
      "eval_samples_per_second": 11.554,
      "eval_steps_per_second": 0.145,
      "eval_wer": 0.3551098241401229,
      "step": 31304
    },
    {
      "epoch": 53.0,
      "learning_rate": 4.781418918918919e-05,
      "loss": 0.0988,
      "step": 31906
    },
    {
      "epoch": 53.0,
      "eval_loss": 0.38413405418395996,
      "eval_runtime": 402.2767,
      "eval_samples_per_second": 11.482,
      "eval_steps_per_second": 0.144,
      "eval_wer": 0.35320290981001484,
      "step": 31906
    },
    {
      "epoch": 54.0,
      "learning_rate": 4.679898648648649e-05,
      "loss": 0.0984,
      "step": 32508
    },
    {
      "epoch": 54.0,
      "eval_loss": 0.4035479724407196,
      "eval_runtime": 382.7739,
      "eval_samples_per_second": 12.067,
      "eval_steps_per_second": 0.152,
      "eval_wer": 0.3548273183134402,
      "step": 32508
    },
    {
      "epoch": 55.0,
      "learning_rate": 4.5783783783783785e-05,
      "loss": 0.0961,
      "step": 33110
    },
    {
      "epoch": 55.0,
      "eval_loss": 0.3776606619358063,
      "eval_runtime": 439.3383,
      "eval_samples_per_second": 10.514,
      "eval_steps_per_second": 0.132,
      "eval_wer": 0.3542269934317395,
      "step": 33110
    },
    {
      "epoch": 56.0,
      "learning_rate": 4.4766891891891895e-05,
      "loss": 0.0949,
      "step": 33712
    },
    {
      "epoch": 56.0,
      "eval_loss": 0.41516759991645813,
      "eval_runtime": 422.6534,
      "eval_samples_per_second": 10.929,
      "eval_steps_per_second": 0.137,
      "eval_wer": 0.35461543894342823,
      "step": 33712
    },
    {
      "epoch": 57.0,
      "learning_rate": 4.375e-05,
      "loss": 0.0935,
      "step": 34314
    },
    {
      "epoch": 57.0,
      "eval_loss": 0.3894650340080261,
      "eval_runtime": 423.1466,
      "eval_samples_per_second": 10.916,
      "eval_steps_per_second": 0.137,
      "eval_wer": 0.347729359418038,
      "step": 34314
    },
    {
      "epoch": 58.0,
      "learning_rate": 4.273310810810811e-05,
      "loss": 0.0925,
      "step": 34916
    },
    {
      "epoch": 58.0,
      "eval_loss": 0.4071078896522522,
      "eval_runtime": 435.5872,
      "eval_samples_per_second": 10.604,
      "eval_steps_per_second": 0.133,
      "eval_wer": 0.35087223673988277,
      "step": 34916
    },
    {
      "epoch": 59.0,
      "learning_rate": 4.171621621621622e-05,
      "loss": 0.0922,
      "step": 35518
    },
    {
      "epoch": 59.0,
      "eval_loss": 0.4226244390010834,
      "eval_runtime": 403.3952,
      "eval_samples_per_second": 11.45,
      "eval_steps_per_second": 0.144,
      "eval_wer": 0.35309697012500885,
      "step": 35518
    },
    {
      "epoch": 60.0,
      "learning_rate": 4.070101351351351e-05,
      "loss": 0.0895,
      "step": 36120
    },
    {
      "epoch": 60.0,
      "eval_loss": 0.43153172731399536,
      "eval_runtime": 398.9709,
      "eval_samples_per_second": 11.577,
      "eval_steps_per_second": 0.145,
      "eval_wer": 0.35094286319655343,
      "step": 36120
    },
    {
      "epoch": 61.0,
      "learning_rate": 3.968412162162162e-05,
      "loss": 0.0897,
      "step": 36722
    },
    {
      "epoch": 61.0,
      "eval_loss": 0.4538232386112213,
      "eval_runtime": 383.2903,
      "eval_samples_per_second": 12.051,
      "eval_steps_per_second": 0.151,
      "eval_wer": 0.3565929797302069,
      "step": 36722
    },
    {
      "epoch": 62.0,
      "learning_rate": 3.8667229729729734e-05,
      "loss": 0.0873,
      "step": 37324
    },
    {
      "epoch": 62.0,
      "eval_loss": 0.421456903219223,
      "eval_runtime": 398.9508,
      "eval_samples_per_second": 11.578,
      "eval_steps_per_second": 0.145,
      "eval_wer": 0.34794123878805,
      "step": 37324
    },
    {
      "epoch": 63.0,
      "learning_rate": 3.765202702702703e-05,
      "loss": 0.0871,
      "step": 37926
    },
    {
      "epoch": 63.0,
      "eval_loss": 0.42824164032936096,
      "eval_runtime": 403.2616,
      "eval_samples_per_second": 11.454,
      "eval_steps_per_second": 0.144,
      "eval_wer": 0.3506956705982061,
      "step": 37926
    },
    {
      "epoch": 64.0,
      "learning_rate": 3.663682432432433e-05,
      "loss": 0.0862,
      "step": 38528
    },
    {
      "epoch": 64.0,
      "eval_loss": 0.4110530912876129,
      "eval_runtime": 413.0672,
      "eval_samples_per_second": 11.182,
      "eval_steps_per_second": 0.14,
      "eval_wer": 0.3477999858747087,
      "step": 38528
    },
    {
      "epoch": 65.0,
      "learning_rate": 3.561993243243243e-05,
      "loss": 0.0847,
      "step": 39130
    },
    {
      "epoch": 65.0,
      "eval_loss": 0.4184618890285492,
      "eval_runtime": 401.9246,
      "eval_samples_per_second": 11.492,
      "eval_steps_per_second": 0.144,
      "eval_wer": 0.3505544176848647,
      "step": 39130
    },
    {
      "epoch": 66.0,
      "learning_rate": 3.460304054054054e-05,
      "loss": 0.0843,
      "step": 39732
    },
    {
      "epoch": 66.0,
      "eval_loss": 0.41128143668174744,
      "eval_runtime": 410.7955,
      "eval_samples_per_second": 11.244,
      "eval_steps_per_second": 0.141,
      "eval_wer": 0.3463168302846246,
      "step": 39732
    },
    {
      "epoch": 67.0,
      "learning_rate": 3.3586148648648654e-05,
      "loss": 0.0829,
      "step": 40334
    },
    {
      "epoch": 67.0,
      "eval_loss": 0.4144081771373749,
      "eval_runtime": 396.7416,
      "eval_samples_per_second": 11.642,
      "eval_steps_per_second": 0.146,
      "eval_wer": 0.3457871318595946,
      "step": 40334
    },
    {
      "epoch": 68.0,
      "learning_rate": 3.256925675675676e-05,
      "loss": 0.0831,
      "step": 40936
    },
    {
      "epoch": 68.0,
      "eval_loss": 0.4325215220451355,
      "eval_runtime": 401.3277,
      "eval_samples_per_second": 11.509,
      "eval_steps_per_second": 0.145,
      "eval_wer": 0.34493961437954657,
      "step": 40936
    },
    {
      "epoch": 69.0,
      "learning_rate": 3.155236486486486e-05,
      "loss": 0.082,
      "step": 41538
    },
    {
      "epoch": 69.0,
      "eval_loss": 0.4353320300579071,
      "eval_runtime": 266.5733,
      "eval_samples_per_second": 17.327,
      "eval_steps_per_second": 0.218,
      "eval_wer": 0.34246768839607317,
      "step": 41538
    },
    {
      "epoch": 70.0,
      "learning_rate": 3.053547297297297e-05,
      "loss": 0.0802,
      "step": 42140
    },
    {
      "epoch": 70.0,
      "eval_loss": 0.39984482526779175,
      "eval_runtime": 366.598,
      "eval_samples_per_second": 12.6,
      "eval_steps_per_second": 0.158,
      "eval_wer": 0.34059608729430046,
      "step": 42140
    },
    {
      "epoch": 71.0,
      "learning_rate": 2.951858108108108e-05,
      "loss": 0.0792,
      "step": 42742
    },
    {
      "epoch": 71.0,
      "eval_loss": 0.4112567901611328,
      "eval_runtime": 290.4304,
      "eval_samples_per_second": 15.904,
      "eval_steps_per_second": 0.2,
      "eval_wer": 0.3403488946959531,
      "step": 42742
    },
    {
      "epoch": 72.0,
      "learning_rate": 2.850337837837838e-05,
      "loss": 0.0761,
      "step": 43344
    },
    {
      "epoch": 72.0,
      "eval_loss": 0.422593891620636,
      "eval_runtime": 332.1718,
      "eval_samples_per_second": 13.905,
      "eval_steps_per_second": 0.175,
      "eval_wer": 0.3413376650893425,
      "step": 43344
    },
    {
      "epoch": 73.0,
      "learning_rate": 2.7488175675675675e-05,
      "loss": 0.078,
      "step": 43946
    },
    {
      "epoch": 73.0,
      "eval_loss": 0.420549213886261,
      "eval_runtime": 348.4845,
      "eval_samples_per_second": 13.255,
      "eval_steps_per_second": 0.166,
      "eval_wer": 0.3414436047743485,
      "step": 43946
    },
    {
      "epoch": 74.0,
      "learning_rate": 2.6471283783783786e-05,
      "loss": 0.0771,
      "step": 44548
    },
    {
      "epoch": 74.0,
      "eval_loss": 0.4529767334461212,
      "eval_runtime": 337.122,
      "eval_samples_per_second": 13.701,
      "eval_steps_per_second": 0.172,
      "eval_wer": 0.34349177201779785,
      "step": 44548
    },
    {
      "epoch": 75.0,
      "learning_rate": 2.5454391891891894e-05,
      "loss": 0.0753,
      "step": 45150
    },
    {
      "epoch": 75.0,
      "eval_loss": 0.42464756965637207,
      "eval_runtime": 276.2812,
      "eval_samples_per_second": 16.718,
      "eval_steps_per_second": 0.21,
      "eval_wer": 0.33936012430256374,
      "step": 45150
    },
    {
      "epoch": 76.0,
      "learning_rate": 2.44375e-05,
      "loss": 0.0747,
      "step": 45752
    },
    {
      "epoch": 76.0,
      "eval_loss": 0.4271426200866699,
      "eval_runtime": 305.7967,
      "eval_samples_per_second": 15.105,
      "eval_steps_per_second": 0.19,
      "eval_wer": 0.3410904724909951,
      "step": 45752
    },
    {
      "epoch": 77.0,
      "learning_rate": 2.34222972972973e-05,
      "loss": 0.0744,
      "step": 46354
    },
    {
      "epoch": 77.0,
      "eval_loss": 0.4557636082172394,
      "eval_runtime": 291.9496,
      "eval_samples_per_second": 15.821,
      "eval_steps_per_second": 0.199,
      "eval_wer": 0.34193798997104313,
      "step": 46354
    },
    {
      "epoch": 78.0,
      "learning_rate": 2.2405405405405406e-05,
      "loss": 0.0735,
      "step": 46956
    },
    {
      "epoch": 78.0,
      "eval_loss": 0.44490668177604675,
      "eval_runtime": 315.5036,
      "eval_samples_per_second": 14.64,
      "eval_steps_per_second": 0.184,
      "eval_wer": 0.34151423123101915,
      "step": 46956
    },
    {
      "epoch": 79.0,
      "learning_rate": 2.1388513513513513e-05,
      "loss": 0.073,
      "step": 47558
    },
    {
      "epoch": 79.0,
      "eval_loss": 0.431576669216156,
      "eval_runtime": 341.3029,
      "eval_samples_per_second": 13.533,
      "eval_steps_per_second": 0.17,
      "eval_wer": 0.33844198036584505,
      "step": 47558
    },
    {
      "epoch": 80.0,
      "learning_rate": 2.0371621621621624e-05,
      "loss": 0.0735,
      "step": 48160
    },
    {
      "epoch": 80.0,
      "eval_loss": 0.4496508538722992,
      "eval_runtime": 314.7178,
      "eval_samples_per_second": 14.677,
      "eval_steps_per_second": 0.184,
      "eval_wer": 0.33833604068083906,
      "step": 48160
    },
    {
      "epoch": 81.0,
      "learning_rate": 1.935641891891892e-05,
      "loss": 0.0721,
      "step": 48762
    },
    {
      "epoch": 81.0,
      "eval_loss": 0.43915238976478577,
      "eval_runtime": 285.1559,
      "eval_samples_per_second": 16.198,
      "eval_steps_per_second": 0.203,
      "eval_wer": 0.33667631894907835,
      "step": 48762
    },
    {
      "epoch": 82.0,
      "learning_rate": 1.8341216216216216e-05,
      "loss": 0.0705,
      "step": 49364
    },
    {
      "epoch": 82.0,
      "eval_loss": 0.42993664741516113,
      "eval_runtime": 312.9699,
      "eval_samples_per_second": 14.759,
      "eval_steps_per_second": 0.185,
      "eval_wer": 0.33678225863408434,
      "step": 49364
    },
    {
      "epoch": 83.0,
      "learning_rate": 1.7324324324324326e-05,
      "loss": 0.0706,
      "step": 49966
    },
    {
      "epoch": 83.0,
      "eval_loss": 0.442228227853775,
      "eval_runtime": 282.6844,
      "eval_samples_per_second": 16.34,
      "eval_steps_per_second": 0.205,
      "eval_wer": 0.33738258351578504,
      "step": 49966
    },
    {
      "epoch": 84.0,
      "learning_rate": 1.6307432432432434e-05,
      "loss": 0.0707,
      "step": 50568
    },
    {
      "epoch": 84.0,
      "eval_loss": 0.43407562375068665,
      "eval_runtime": 348.7351,
      "eval_samples_per_second": 13.245,
      "eval_steps_per_second": 0.166,
      "eval_wer": 0.33586411469736566,
      "step": 50568
    },
    {
      "epoch": 85.0,
      "learning_rate": 1.529054054054054e-05,
      "loss": 0.0692,
      "step": 51170
    },
    {
      "epoch": 85.0,
      "eval_loss": 0.44134435057640076,
      "eval_runtime": 326.8142,
      "eval_samples_per_second": 14.133,
      "eval_steps_per_second": 0.177,
      "eval_wer": 0.33568754855568894,
      "step": 51170
    },
    {
      "epoch": 86.0,
      "learning_rate": 1.4273648648648649e-05,
      "loss": 0.069,
      "step": 51772
    },
    {
      "epoch": 86.0,
      "eval_loss": 0.4566088318824768,
      "eval_runtime": 315.2023,
      "eval_samples_per_second": 14.654,
      "eval_steps_per_second": 0.184,
      "eval_wer": 0.337912281940815,
      "step": 51772
    },
    {
      "epoch": 87.0,
      "learning_rate": 1.3256756756756758e-05,
      "loss": 0.0673,
      "step": 52374
    },
    {
      "epoch": 87.0,
      "eval_loss": 0.4371909201145172,
      "eval_runtime": 288.747,
      "eval_samples_per_second": 15.997,
      "eval_steps_per_second": 0.201,
      "eval_wer": 0.33957200367257573,
      "step": 52374
    },
    {
      "epoch": 88.0,
      "learning_rate": 1.2239864864864865e-05,
      "loss": 0.0678,
      "step": 52976
    },
    {
      "epoch": 88.0,
      "eval_loss": 0.438821017742157,
      "eval_runtime": 399.7437,
      "eval_samples_per_second": 11.555,
      "eval_steps_per_second": 0.145,
      "eval_wer": 0.33819478776749773,
      "step": 52976
    },
    {
      "epoch": 89.0,
      "learning_rate": 1.1222972972972975e-05,
      "loss": 0.0679,
      "step": 53578
    },
    {
      "epoch": 89.0,
      "eval_loss": 0.4385330379009247,
      "eval_runtime": 425.8088,
      "eval_samples_per_second": 10.848,
      "eval_steps_per_second": 0.136,
      "eval_wer": 0.33628787343738964,
      "step": 53578
    },
    {
      "epoch": 90.0,
      "learning_rate": 1.0207770270270272e-05,
      "loss": 0.0676,
      "step": 54180
    },
    {
      "epoch": 90.0,
      "eval_loss": 0.44581928849220276,
      "eval_runtime": 430.0001,
      "eval_samples_per_second": 10.742,
      "eval_steps_per_second": 0.135,
      "eval_wer": 0.3353344162723356,
      "step": 54180
    },
    {
      "epoch": 91.0,
      "learning_rate": 9.19087837837838e-06,
      "loss": 0.0661,
      "step": 54782
    },
    {
      "epoch": 91.0,
      "eval_loss": 0.44878506660461426,
      "eval_runtime": 414.3814,
      "eval_samples_per_second": 11.147,
      "eval_steps_per_second": 0.14,
      "eval_wer": 0.3352284765873296,
      "step": 54782
    },
    {
      "epoch": 92.0,
      "learning_rate": 8.173986486486487e-06,
      "loss": 0.0667,
      "step": 55384
    },
    {
      "epoch": 92.0,
      "eval_loss": 0.4502880275249481,
      "eval_runtime": 640.6777,
      "eval_samples_per_second": 7.21,
      "eval_steps_per_second": 0.091,
      "eval_wer": 0.33649975280740163,
      "step": 55384
    },
    {
      "epoch": 93.0,
      "learning_rate": 7.157094594594594e-06,
      "loss": 0.0668,
      "step": 55986
    },
    {
      "epoch": 93.0,
      "eval_loss": 0.4470549523830414,
      "eval_runtime": 464.6555,
      "eval_samples_per_second": 9.941,
      "eval_steps_per_second": 0.125,
      "eval_wer": 0.33371000776891024,
      "step": 55986
    },
    {
      "epoch": 94.0,
      "learning_rate": 6.140202702702703e-06,
      "loss": 0.0663,
      "step": 56588
    },
    {
      "epoch": 94.0,
      "eval_loss": 0.4470076858997345,
      "eval_runtime": 411.3931,
      "eval_samples_per_second": 11.228,
      "eval_steps_per_second": 0.141,
      "eval_wer": 0.3332862490288862,
      "step": 56588
    },
    {
      "epoch": 95.0,
      "learning_rate": 5.123310810810811e-06,
      "loss": 0.0652,
      "step": 57190
    },
    {
      "epoch": 95.0,
      "eval_loss": 0.4530947804450989,
      "eval_runtime": 429.7206,
      "eval_samples_per_second": 10.749,
      "eval_steps_per_second": 0.135,
      "eval_wer": 0.33476940461897026,
      "step": 57190
    },
    {
      "epoch": 96.0,
      "learning_rate": 4.108108108108108e-06,
      "loss": 0.0648,
      "step": 57792
    },
    {
      "epoch": 96.0,
      "eval_loss": 0.44802409410476685,
      "eval_runtime": 456.6508,
      "eval_samples_per_second": 10.115,
      "eval_steps_per_second": 0.127,
      "eval_wer": 0.33346281517056287,
      "step": 57792
    },
    {
      "epoch": 97.0,
      "learning_rate": 3.0912162162162163e-06,
      "loss": 0.0638,
      "step": 58394
    },
    {
      "epoch": 97.0,
      "eval_loss": 0.4479643702507019,
      "eval_runtime": 442.7361,
      "eval_samples_per_second": 10.433,
      "eval_steps_per_second": 0.131,
      "eval_wer": 0.33346281517056287,
      "step": 58394
    },
    {
      "epoch": 98.0,
      "learning_rate": 2.0743243243243246e-06,
      "loss": 0.0637,
      "step": 58996
    },
    {
      "epoch": 98.0,
      "eval_loss": 0.4514257311820984,
      "eval_runtime": 447.5804,
      "eval_samples_per_second": 10.32,
      "eval_steps_per_second": 0.13,
      "eval_wer": 0.3328624902888622,
      "step": 58996
    }
  ],
  "max_steps": 60200,
  "num_train_epochs": 100,
  "total_flos": 1.2978795625125155e+21,
  "trial_name": null,
  "trial_params": null
}