{
  "best_metric": 0.8883299798792756,
  "best_model_checkpoint": "PhoWhisper-small-vispeech-classifier-v4/checkpoint-490",
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 980,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01020408163265306,
      "grad_norm": 51640.66015625,
      "learning_rate": 5.1020408163265303e-08,
      "loss": 2.0778,
      "step": 5
    },
    {
      "epoch": 0.02040816326530612,
      "grad_norm": 46773.35546875,
      "learning_rate": 1.0204081632653061e-07,
      "loss": 2.0809,
      "step": 10
    },
    {
      "epoch": 0.030612244897959183,
      "grad_norm": 45343.12109375,
      "learning_rate": 1.5306122448979592e-07,
      "loss": 2.0771,
      "step": 15
    },
    {
      "epoch": 0.04081632653061224,
      "grad_norm": 49285.91796875,
      "learning_rate": 2.0408163265306121e-07,
      "loss": 2.077,
      "step": 20
    },
    {
      "epoch": 0.05102040816326531,
      "grad_norm": 63271.82421875,
      "learning_rate": 2.5510204081632656e-07,
      "loss": 2.0755,
      "step": 25
    },
    {
      "epoch": 0.061224489795918366,
      "grad_norm": 51649.79296875,
      "learning_rate": 3.0612244897959183e-07,
      "loss": 2.0764,
      "step": 30
    },
    {
      "epoch": 0.07142857142857142,
      "grad_norm": 51633.98828125,
      "learning_rate": 3.5714285714285716e-07,
      "loss": 2.0763,
      "step": 35
    },
    {
      "epoch": 0.08163265306122448,
      "grad_norm": 61777.23828125,
      "learning_rate": 4.0816326530612243e-07,
      "loss": 2.0772,
      "step": 40
    },
    {
      "epoch": 0.09183673469387756,
      "grad_norm": 58467.15625,
      "learning_rate": 4.591836734693878e-07,
      "loss": 2.0743,
      "step": 45
    },
    {
      "epoch": 0.10204081632653061,
      "grad_norm": 48974.37890625,
      "learning_rate": 5.102040816326531e-07,
      "loss": 2.072,
      "step": 50
    },
    {
      "epoch": 0.11224489795918367,
      "grad_norm": 71007.515625,
      "learning_rate": 5.612244897959184e-07,
      "loss": 2.0751,
      "step": 55
    },
    {
      "epoch": 0.12244897959183673,
      "grad_norm": 58237.77734375,
      "learning_rate": 6.122448979591837e-07,
      "loss": 2.0702,
      "step": 60
    },
    {
      "epoch": 0.1326530612244898,
      "grad_norm": 58785.83203125,
      "learning_rate": 6.632653061224491e-07,
      "loss": 2.0713,
      "step": 65
    },
    {
      "epoch": 0.14285714285714285,
      "grad_norm": 52771.265625,
      "learning_rate": 7.142857142857143e-07,
      "loss": 2.069,
      "step": 70
    },
    {
      "epoch": 0.15306122448979592,
      "grad_norm": 69647.078125,
      "learning_rate": 7.653061224489796e-07,
      "loss": 2.0684,
      "step": 75
    },
    {
      "epoch": 0.16326530612244897,
      "grad_norm": 59148.47265625,
      "learning_rate": 8.163265306122449e-07,
      "loss": 2.0634,
      "step": 80
    },
    {
      "epoch": 0.17346938775510204,
      "grad_norm": 66723.921875,
      "learning_rate": 8.673469387755103e-07,
      "loss": 2.064,
      "step": 85
    },
    {
      "epoch": 0.1836734693877551,
      "grad_norm": 72545.203125,
      "learning_rate": 9.183673469387756e-07,
      "loss": 2.0593,
      "step": 90
    },
    {
      "epoch": 0.19387755102040816,
      "grad_norm": 59112.55859375,
      "learning_rate": 9.69387755102041e-07,
      "loss": 2.0553,
      "step": 95
    },
    {
      "epoch": 0.20408163265306123,
      "grad_norm": 68369.265625,
      "learning_rate": 1.0204081632653063e-06,
      "loss": 2.0566,
      "step": 100
    },
    {
      "epoch": 0.21428571428571427,
      "grad_norm": 51080.62890625,
      "learning_rate": 1.0714285714285714e-06,
      "loss": 2.0505,
      "step": 105
    },
    {
      "epoch": 0.22448979591836735,
      "grad_norm": 78760.0859375,
      "learning_rate": 1.122448979591837e-06,
      "loss": 2.0496,
      "step": 110
    },
    {
      "epoch": 0.23469387755102042,
      "grad_norm": 57198.7890625,
      "learning_rate": 1.1734693877551022e-06,
      "loss": 2.0458,
      "step": 115
    },
    {
      "epoch": 0.24489795918367346,
      "grad_norm": 98369.4609375,
      "learning_rate": 1.2244897959183673e-06,
      "loss": 2.0336,
      "step": 120
    },
    {
      "epoch": 0.25510204081632654,
      "grad_norm": 80801.5703125,
      "learning_rate": 1.2755102040816329e-06,
      "loss": 2.0354,
      "step": 125
    },
    {
      "epoch": 0.2653061224489796,
      "grad_norm": 82179.9765625,
      "learning_rate": 1.3265306122448982e-06,
      "loss": 2.0263,
      "step": 130
    },
    {
      "epoch": 0.2755102040816326,
      "grad_norm": 83225.078125,
      "learning_rate": 1.3775510204081633e-06,
      "loss": 2.0168,
      "step": 135
    },
    {
      "epoch": 0.2857142857142857,
      "grad_norm": 62451.64453125,
      "learning_rate": 1.4285714285714286e-06,
      "loss": 2.023,
      "step": 140
    },
    {
      "epoch": 0.29591836734693877,
      "grad_norm": 75567.9453125,
      "learning_rate": 1.479591836734694e-06,
      "loss": 2.0206,
      "step": 145
    },
    {
      "epoch": 0.30612244897959184,
      "grad_norm": 98461.484375,
      "learning_rate": 1.5306122448979593e-06,
      "loss": 1.9984,
      "step": 150
    },
    {
      "epoch": 0.3163265306122449,
      "grad_norm": 75078.1484375,
      "learning_rate": 1.5816326530612248e-06,
      "loss": 1.9899,
      "step": 155
    },
    {
      "epoch": 0.32653061224489793,
      "grad_norm": 97895.671875,
      "learning_rate": 1.6326530612244897e-06,
      "loss": 1.9758,
      "step": 160
    },
    {
      "epoch": 0.336734693877551,
      "grad_norm": 65880.2421875,
      "learning_rate": 1.6836734693877552e-06,
      "loss": 1.9918,
      "step": 165
    },
    {
      "epoch": 0.3469387755102041,
      "grad_norm": 70675.578125,
      "learning_rate": 1.7346938775510206e-06,
      "loss": 1.9715,
      "step": 170
    },
    {
      "epoch": 0.35714285714285715,
      "grad_norm": 64327.19921875,
      "learning_rate": 1.7857142857142859e-06,
      "loss": 1.9597,
      "step": 175
    },
    {
      "epoch": 0.3673469387755102,
      "grad_norm": 80610.6171875,
      "learning_rate": 1.8367346938775512e-06,
      "loss": 1.9632,
      "step": 180
    },
    {
      "epoch": 0.37755102040816324,
      "grad_norm": 110899.375,
      "learning_rate": 1.8877551020408163e-06,
      "loss": 1.9476,
      "step": 185
    },
    {
      "epoch": 0.3877551020408163,
      "grad_norm": 111132.5703125,
      "learning_rate": 1.938775510204082e-06,
      "loss": 1.9369,
      "step": 190
    },
    {
      "epoch": 0.3979591836734694,
      "grad_norm": 88722.625,
      "learning_rate": 1.989795918367347e-06,
      "loss": 1.9452,
      "step": 195
    },
    {
      "epoch": 0.40816326530612246,
      "grad_norm": 105874.296875,
      "learning_rate": 2.0408163265306125e-06,
      "loss": 1.9459,
      "step": 200
    },
    {
      "epoch": 0.41836734693877553,
      "grad_norm": 97286.0078125,
      "learning_rate": 2.0918367346938776e-06,
      "loss": 1.918,
      "step": 205
    },
    {
      "epoch": 0.42857142857142855,
      "grad_norm": 82848.0625,
      "learning_rate": 2.1428571428571427e-06,
      "loss": 1.9135,
      "step": 210
    },
    {
      "epoch": 0.4387755102040816,
      "grad_norm": 114854.4609375,
      "learning_rate": 2.1938775510204083e-06,
      "loss": 1.8869,
      "step": 215
    },
    {
      "epoch": 0.4489795918367347,
      "grad_norm": 86661.25,
      "learning_rate": 2.244897959183674e-06,
      "loss": 1.8861,
      "step": 220
    },
    {
      "epoch": 0.45918367346938777,
      "grad_norm": 127591.3984375,
      "learning_rate": 2.295918367346939e-06,
      "loss": 1.8396,
      "step": 225
    },
    {
      "epoch": 0.46938775510204084,
      "grad_norm": 71038.8671875,
      "learning_rate": 2.3469387755102044e-06,
      "loss": 1.8995,
      "step": 230
    },
    {
      "epoch": 0.47959183673469385,
      "grad_norm": 101402.421875,
      "learning_rate": 2.3979591836734696e-06,
      "loss": 1.8699,
      "step": 235
    },
    {
      "epoch": 0.4897959183673469,
      "grad_norm": 113157.3828125,
      "learning_rate": 2.4489795918367347e-06,
      "loss": 1.8599,
      "step": 240
    },
    {
      "epoch": 0.5,
      "grad_norm": 115485.0546875,
      "learning_rate": 2.5e-06,
      "loss": 1.8214,
      "step": 245
    },
    {
      "epoch": 0.5102040816326531,
      "grad_norm": 139369.8125,
      "learning_rate": 2.5510204081632657e-06,
      "loss": 1.8539,
      "step": 250
    },
    {
      "epoch": 0.5204081632653061,
      "grad_norm": 141401.46875,
      "learning_rate": 2.602040816326531e-06,
      "loss": 1.7507,
      "step": 255
    },
    {
      "epoch": 0.5306122448979592,
      "grad_norm": 125767.3984375,
      "learning_rate": 2.6530612244897964e-06,
      "loss": 1.7948,
      "step": 260
    },
    {
      "epoch": 0.5408163265306123,
      "grad_norm": 148848.5625,
      "learning_rate": 2.7040816326530615e-06,
      "loss": 1.7871,
      "step": 265
    },
    {
      "epoch": 0.5510204081632653,
      "grad_norm": 152509.4375,
      "learning_rate": 2.7551020408163266e-06,
      "loss": 1.8054,
      "step": 270
    },
    {
      "epoch": 0.5612244897959183,
      "grad_norm": 114681.078125,
      "learning_rate": 2.8061224489795917e-06,
      "loss": 1.7711,
      "step": 275
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 119104.984375,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 1.7139,
      "step": 280
    },
    {
      "epoch": 0.5816326530612245,
      "grad_norm": 123160.5,
      "learning_rate": 2.908163265306123e-06,
      "loss": 1.703,
      "step": 285
    },
    {
      "epoch": 0.5918367346938775,
      "grad_norm": 157218.203125,
      "learning_rate": 2.959183673469388e-06,
      "loss": 1.7685,
      "step": 290
    },
    {
      "epoch": 0.6020408163265306,
      "grad_norm": 134804.828125,
      "learning_rate": 3.0102040816326534e-06,
      "loss": 1.7053,
      "step": 295
    },
    {
      "epoch": 0.6122448979591837,
      "grad_norm": 153977.625,
      "learning_rate": 3.0612244897959185e-06,
      "loss": 1.6711,
      "step": 300
    },
    {
      "epoch": 0.6224489795918368,
      "grad_norm": 204690.125,
      "learning_rate": 3.112244897959184e-06,
      "loss": 1.5873,
      "step": 305
    },
    {
      "epoch": 0.6326530612244898,
      "grad_norm": 158604.1875,
      "learning_rate": 3.1632653061224496e-06,
      "loss": 1.6642,
      "step": 310
    },
    {
      "epoch": 0.6428571428571429,
      "grad_norm": 181877.484375,
      "learning_rate": 3.2142857142857147e-06,
      "loss": 1.6204,
      "step": 315
    },
    {
      "epoch": 0.6530612244897959,
      "grad_norm": 157735.734375,
      "learning_rate": 3.2653061224489794e-06,
      "loss": 1.6152,
      "step": 320
    },
    {
      "epoch": 0.6632653061224489,
      "grad_norm": 121935.3828125,
      "learning_rate": 3.316326530612245e-06,
      "loss": 1.6357,
      "step": 325
    },
    {
      "epoch": 0.673469387755102,
      "grad_norm": 133579.859375,
      "learning_rate": 3.3673469387755105e-06,
      "loss": 1.6085,
      "step": 330
    },
    {
      "epoch": 0.6836734693877551,
      "grad_norm": 154886.953125,
      "learning_rate": 3.4183673469387756e-06,
      "loss": 1.5609,
      "step": 335
    },
    {
      "epoch": 0.6938775510204082,
      "grad_norm": 151655.0625,
      "learning_rate": 3.469387755102041e-06,
      "loss": 1.5905,
      "step": 340
    },
    {
      "epoch": 0.7040816326530612,
      "grad_norm": 198471.609375,
      "learning_rate": 3.5204081632653062e-06,
      "loss": 1.5052,
      "step": 345
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 191516.453125,
      "learning_rate": 3.5714285714285718e-06,
      "loss": 1.5798,
      "step": 350
    },
    {
      "epoch": 0.7244897959183674,
      "grad_norm": 207511.296875,
      "learning_rate": 3.6224489795918373e-06,
      "loss": 1.5382,
      "step": 355
    },
    {
      "epoch": 0.7346938775510204,
      "grad_norm": 170820.609375,
      "learning_rate": 3.6734693877551024e-06,
      "loss": 1.485,
      "step": 360
    },
    {
      "epoch": 0.7448979591836735,
      "grad_norm": 183925.796875,
      "learning_rate": 3.724489795918368e-06,
      "loss": 1.427,
      "step": 365
    },
    {
      "epoch": 0.7551020408163265,
      "grad_norm": 302829.75,
      "learning_rate": 3.7755102040816327e-06,
      "loss": 1.4242,
      "step": 370
    },
    {
      "epoch": 0.7653061224489796,
      "grad_norm": 292990.5625,
      "learning_rate": 3.826530612244898e-06,
      "loss": 1.484,
      "step": 375
    },
    {
      "epoch": 0.7755102040816326,
      "grad_norm": 189517.421875,
      "learning_rate": 3.877551020408164e-06,
      "loss": 1.5039,
      "step": 380
    },
    {
      "epoch": 0.7857142857142857,
      "grad_norm": 239271.890625,
      "learning_rate": 3.928571428571429e-06,
      "loss": 1.3332,
      "step": 385
    },
    {
      "epoch": 0.7959183673469388,
      "grad_norm": 167386.28125,
      "learning_rate": 3.979591836734694e-06,
      "loss": 1.429,
      "step": 390
    },
    {
      "epoch": 0.8061224489795918,
      "grad_norm": 165701.9375,
      "learning_rate": 4.03061224489796e-06,
      "loss": 1.3167,
      "step": 395
    },
    {
      "epoch": 0.8163265306122449,
      "grad_norm": 284793.46875,
      "learning_rate": 4.081632653061225e-06,
      "loss": 1.3867,
      "step": 400
    },
    {
      "epoch": 0.826530612244898,
      "grad_norm": 159807.671875,
      "learning_rate": 4.13265306122449e-06,
      "loss": 1.404,
      "step": 405
    },
    {
      "epoch": 0.8367346938775511,
      "grad_norm": 225698.859375,
      "learning_rate": 4.183673469387755e-06,
      "loss": 1.3842,
      "step": 410
    },
    {
      "epoch": 0.8469387755102041,
      "grad_norm": 148945.90625,
      "learning_rate": 4.234693877551021e-06,
      "loss": 1.4605,
      "step": 415
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 301350.15625,
      "learning_rate": 4.2857142857142855e-06,
      "loss": 1.4279,
      "step": 420
    },
    {
      "epoch": 0.8673469387755102,
      "grad_norm": 216519.640625,
      "learning_rate": 4.336734693877551e-06,
      "loss": 1.3122,
      "step": 425
    },
    {
      "epoch": 0.8775510204081632,
      "grad_norm": 243937.1875,
      "learning_rate": 4.3877551020408165e-06,
      "loss": 1.2084,
      "step": 430
    },
    {
      "epoch": 0.8877551020408163,
      "grad_norm": 195260.390625,
      "learning_rate": 4.438775510204082e-06,
      "loss": 1.3024,
      "step": 435
    },
    {
      "epoch": 0.8979591836734694,
      "grad_norm": 218123.65625,
      "learning_rate": 4.489795918367348e-06,
      "loss": 1.2303,
      "step": 440
    },
    {
      "epoch": 0.9081632653061225,
      "grad_norm": 343990.03125,
      "learning_rate": 4.540816326530613e-06,
      "loss": 1.2012,
      "step": 445
    },
    {
      "epoch": 0.9183673469387755,
      "grad_norm": 157196.390625,
      "learning_rate": 4.591836734693878e-06,
      "loss": 1.2632,
      "step": 450
    },
    {
      "epoch": 0.9285714285714286,
      "grad_norm": 238103.46875,
      "learning_rate": 4.642857142857144e-06,
      "loss": 1.3131,
      "step": 455
    },
    {
      "epoch": 0.9387755102040817,
      "grad_norm": 158442.625,
      "learning_rate": 4.693877551020409e-06,
      "loss": 1.2849,
      "step": 460
    },
    {
      "epoch": 0.9489795918367347,
      "grad_norm": 178316.640625,
      "learning_rate": 4.744897959183674e-06,
      "loss": 1.0987,
      "step": 465
    },
    {
      "epoch": 0.9591836734693877,
      "grad_norm": 240848.03125,
      "learning_rate": 4.795918367346939e-06,
      "loss": 1.154,
      "step": 470
    },
    {
      "epoch": 0.9693877551020408,
      "grad_norm": 677730.8125,
      "learning_rate": 4.846938775510204e-06,
      "loss": 1.1682,
      "step": 475
    },
    {
      "epoch": 0.9795918367346939,
      "grad_norm": 300622.875,
      "learning_rate": 4.897959183673469e-06,
      "loss": 1.1212,
      "step": 480
    },
    {
      "epoch": 0.9897959183673469,
      "grad_norm": 234745.03125,
      "learning_rate": 4.948979591836735e-06,
      "loss": 1.1859,
      "step": 485
    },
    {
      "epoch": 1.0,
      "grad_norm": 254966.984375,
      "learning_rate": 5e-06,
      "loss": 1.1763,
      "step": 490
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.8883299798792756,
      "eval_loss": 0.968666136264801,
      "eval_runtime": 208.5533,
      "eval_samples_per_second": 14.299,
      "eval_steps_per_second": 0.599,
      "step": 490
    },
    {
      "epoch": 1.010204081632653,
      "grad_norm": 227501.953125,
      "learning_rate": 4.994331065759638e-06,
      "loss": 1.1466,
      "step": 495
    },
    {
      "epoch": 1.0204081632653061,
      "grad_norm": 184310.828125,
      "learning_rate": 4.9886621315192745e-06,
      "loss": 1.0584,
      "step": 500
    },
    {
      "epoch": 1.030612244897959,
      "grad_norm": 568293.25,
      "learning_rate": 4.982993197278912e-06,
      "loss": 1.0574,
      "step": 505
    },
    {
      "epoch": 1.0408163265306123,
      "grad_norm": 208197.4375,
      "learning_rate": 4.9773242630385495e-06,
      "loss": 1.0964,
      "step": 510
    },
    {
      "epoch": 1.0510204081632653,
      "grad_norm": 302428.34375,
      "learning_rate": 4.971655328798186e-06,
      "loss": 1.1017,
      "step": 515
    },
    {
      "epoch": 1.0612244897959184,
      "grad_norm": 331227.1875,
      "learning_rate": 4.965986394557824e-06,
      "loss": 1.0954,
      "step": 520
    },
    {
      "epoch": 1.0714285714285714,
      "grad_norm": 525813.5625,
      "learning_rate": 4.960317460317461e-06,
      "loss": 1.061,
      "step": 525
    },
    {
      "epoch": 1.0816326530612246,
      "grad_norm": 578634.125,
      "learning_rate": 4.954648526077098e-06,
      "loss": 1.135,
      "step": 530
    },
    {
      "epoch": 1.0918367346938775,
      "grad_norm": 208531.90625,
      "learning_rate": 4.948979591836735e-06,
      "loss": 0.9104,
      "step": 535
    },
    {
      "epoch": 1.1020408163265305,
      "grad_norm": 621899.75,
      "learning_rate": 4.943310657596373e-06,
      "loss": 1.0909,
      "step": 540
    },
    {
      "epoch": 1.1122448979591837,
      "grad_norm": 362684.3125,
      "learning_rate": 4.9376417233560094e-06,
      "loss": 0.8759,
      "step": 545
    },
    {
      "epoch": 1.1224489795918366,
      "grad_norm": 274587.8125,
      "learning_rate": 4.931972789115647e-06,
      "loss": 0.9977,
      "step": 550
    },
    {
      "epoch": 1.1326530612244898,
      "grad_norm": 379287.3125,
      "learning_rate": 4.9263038548752836e-06,
      "loss": 1.013,
      "step": 555
    },
    {
      "epoch": 1.1428571428571428,
      "grad_norm": 277623.0,
      "learning_rate": 4.920634920634921e-06,
      "loss": 0.9425,
      "step": 560
    },
    {
      "epoch": 1.153061224489796,
      "grad_norm": 341604.53125,
      "learning_rate": 4.914965986394558e-06,
      "loss": 0.9697,
      "step": 565
    },
    {
      "epoch": 1.163265306122449,
      "grad_norm": 742070.625,
      "learning_rate": 4.909297052154195e-06,
      "loss": 1.1181,
      "step": 570
    },
    {
      "epoch": 1.1734693877551021,
      "grad_norm": 225296.84375,
      "learning_rate": 4.903628117913833e-06,
      "loss": 0.9465,
      "step": 575
    },
    {
      "epoch": 1.183673469387755,
      "grad_norm": 952885.3125,
      "learning_rate": 4.897959183673469e-06,
      "loss": 1.1135,
      "step": 580
    },
    {
      "epoch": 1.193877551020408,
      "grad_norm": 527028.875,
      "learning_rate": 4.892290249433107e-06,
      "loss": 1.109,
      "step": 585
    },
    {
      "epoch": 1.2040816326530612,
      "grad_norm": 243262.0625,
      "learning_rate": 4.886621315192744e-06,
      "loss": 0.9313,
      "step": 590
    },
    {
      "epoch": 1.2142857142857142,
      "grad_norm": 287492.28125,
      "learning_rate": 4.880952380952381e-06,
      "loss": 0.9603,
      "step": 595
    },
    {
      "epoch": 1.2244897959183674,
      "grad_norm": 400522.90625,
      "learning_rate": 4.8752834467120185e-06,
      "loss": 0.9105,
      "step": 600
    },
    {
      "epoch": 1.2346938775510203,
      "grad_norm": 190966.40625,
      "learning_rate": 4.869614512471656e-06,
      "loss": 0.9654,
      "step": 605
    },
    {
      "epoch": 1.2448979591836735,
      "grad_norm": 223976.515625,
      "learning_rate": 4.863945578231293e-06,
      "loss": 0.9511,
      "step": 610
    },
    {
      "epoch": 1.2551020408163265,
      "grad_norm": 345172.09375,
      "learning_rate": 4.85827664399093e-06,
      "loss": 0.8092,
      "step": 615
    },
    {
      "epoch": 1.2653061224489797,
      "grad_norm": 378395.75,
      "learning_rate": 4.852607709750568e-06,
      "loss": 0.9777,
      "step": 620
    },
    {
      "epoch": 1.2755102040816326,
      "grad_norm": 408057.8125,
      "learning_rate": 4.846938775510204e-06,
      "loss": 1.006,
      "step": 625
    },
    {
      "epoch": 1.2857142857142856,
      "grad_norm": 254345.203125,
      "learning_rate": 4.841269841269842e-06,
      "loss": 0.9107,
      "step": 630
    },
    {
      "epoch": 1.2959183673469388,
      "grad_norm": 465929.21875,
      "learning_rate": 4.835600907029479e-06,
      "loss": 1.0237,
      "step": 635
    },
    {
      "epoch": 1.306122448979592,
      "grad_norm": 665911.5625,
      "learning_rate": 4.829931972789116e-06,
      "loss": 0.886,
      "step": 640
    },
    {
      "epoch": 1.316326530612245,
      "grad_norm": 541784.25,
      "learning_rate": 4.824263038548753e-06,
      "loss": 0.9266,
      "step": 645
    },
    {
      "epoch": 1.3265306122448979,
      "grad_norm": 181121.25,
      "learning_rate": 4.818594104308391e-06,
      "loss": 0.9289,
      "step": 650
    },
    {
      "epoch": 1.336734693877551,
      "grad_norm": 465088.90625,
      "learning_rate": 4.8129251700680275e-06,
      "loss": 0.8976,
      "step": 655
    },
    {
      "epoch": 1.346938775510204,
      "grad_norm": 309529.28125,
      "learning_rate": 4.807256235827665e-06,
      "loss": 0.9182,
      "step": 660
    },
    {
      "epoch": 1.3571428571428572,
      "grad_norm": 672721.125,
      "learning_rate": 4.8015873015873025e-06,
      "loss": 0.8919,
      "step": 665
    },
    {
      "epoch": 1.3673469387755102,
      "grad_norm": 300983.96875,
      "learning_rate": 4.795918367346939e-06,
      "loss": 0.7491,
      "step": 670
    },
    {
      "epoch": 1.3775510204081631,
      "grad_norm": 168024.203125,
      "learning_rate": 4.790249433106577e-06,
      "loss": 0.803,
      "step": 675
    },
    {
      "epoch": 1.3877551020408163,
      "grad_norm": 252603.5,
      "learning_rate": 4.784580498866213e-06,
      "loss": 0.8481,
      "step": 680
    },
    {
      "epoch": 1.3979591836734695,
      "grad_norm": 495847.71875,
      "learning_rate": 4.778911564625851e-06,
      "loss": 0.9796,
      "step": 685
    },
    {
      "epoch": 1.4081632653061225,
      "grad_norm": 213929.046875,
      "learning_rate": 4.773242630385488e-06,
      "loss": 0.8509,
      "step": 690
    },
    {
      "epoch": 1.4183673469387754,
      "grad_norm": 203737.90625,
      "learning_rate": 4.767573696145125e-06,
      "loss": 0.8133,
      "step": 695
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 340089.6875,
      "learning_rate": 4.761904761904762e-06,
      "loss": 0.8628,
      "step": 700
    },
    {
      "epoch": 1.4387755102040816,
      "grad_norm": 292163.90625,
      "learning_rate": 4.756235827664399e-06,
      "loss": 0.9341,
      "step": 705
    },
    {
      "epoch": 1.4489795918367347,
      "grad_norm": 266707.96875,
      "learning_rate": 4.7505668934240365e-06,
      "loss": 1.0158,
      "step": 710
    },
    {
      "epoch": 1.4591836734693877,
      "grad_norm": 389876.40625,
      "learning_rate": 4.744897959183674e-06,
      "loss": 0.9575,
      "step": 715
    },
    {
      "epoch": 1.469387755102041,
      "grad_norm": 699534.0625,
      "learning_rate": 4.739229024943311e-06,
      "loss": 0.9293,
      "step": 720
    },
    {
      "epoch": 1.4795918367346939,
      "grad_norm": 495432.5,
      "learning_rate": 4.733560090702948e-06,
      "loss": 0.7233,
      "step": 725
    },
    {
      "epoch": 1.489795918367347,
      "grad_norm": 297580.71875,
      "learning_rate": 4.727891156462586e-06,
      "loss": 0.7624,
      "step": 730
    },
    {
      "epoch": 1.5,
      "grad_norm": 214458.125,
      "learning_rate": 4.722222222222222e-06,
      "loss": 0.8704,
      "step": 735
    },
    {
      "epoch": 1.510204081632653,
      "grad_norm": 483185.90625,
      "learning_rate": 4.71655328798186e-06,
      "loss": 0.7841,
      "step": 740
    },
    {
      "epoch": 1.5204081632653061,
      "grad_norm": 279528.75,
      "learning_rate": 4.710884353741497e-06,
      "loss": 0.8634,
      "step": 745
    },
    {
      "epoch": 1.5306122448979593,
      "grad_norm": 398520.15625,
      "learning_rate": 4.705215419501134e-06,
      "loss": 0.902,
      "step": 750
    },
    {
      "epoch": 1.5408163265306123,
      "grad_norm": 343986.875,
      "learning_rate": 4.699546485260771e-06,
      "loss": 0.8896,
      "step": 755
    },
    {
      "epoch": 1.5510204081632653,
      "grad_norm": 941463.0625,
      "learning_rate": 4.693877551020409e-06,
      "loss": 0.8483,
      "step": 760
    },
    {
      "epoch": 1.5612244897959182,
      "grad_norm": 382110.125,
      "learning_rate": 4.6882086167800455e-06,
      "loss": 0.944,
      "step": 765
    },
    {
      "epoch": 1.5714285714285714,
      "grad_norm": 321413.15625,
      "learning_rate": 4.682539682539683e-06,
      "loss": 0.8019,
      "step": 770
    },
    {
      "epoch": 1.5816326530612246,
      "grad_norm": 404689.875,
      "learning_rate": 4.6768707482993205e-06,
      "loss": 0.7069,
      "step": 775
    },
    {
      "epoch": 1.5918367346938775,
      "grad_norm": 653981.25,
      "learning_rate": 4.671201814058957e-06,
      "loss": 0.9424,
      "step": 780
    },
    {
      "epoch": 1.6020408163265305,
      "grad_norm": 387232.34375,
      "learning_rate": 4.665532879818595e-06,
      "loss": 0.9593,
      "step": 785
    },
    {
      "epoch": 1.6122448979591837,
      "grad_norm": 527808.625,
      "learning_rate": 4.659863945578232e-06,
      "loss": 0.8147,
      "step": 790
    },
    {
      "epoch": 1.6224489795918369,
      "grad_norm": 350020.8125,
      "learning_rate": 4.654195011337869e-06,
      "loss": 1.0195,
      "step": 795
    },
    {
      "epoch": 1.6326530612244898,
      "grad_norm": 510409.9375,
      "learning_rate": 4.648526077097506e-06,
      "loss": 0.865,
      "step": 800
    },
    {
      "epoch": 1.6428571428571428,
      "grad_norm": 238838.65625,
      "learning_rate": 4.642857142857144e-06,
      "loss": 0.8115,
      "step": 805
    },
    {
      "epoch": 1.6530612244897958,
      "grad_norm": 266616.84375,
      "learning_rate": 4.6371882086167804e-06,
      "loss": 0.8091,
      "step": 810
    },
    {
      "epoch": 1.663265306122449,
      "grad_norm": 398578.84375,
      "learning_rate": 4.631519274376418e-06,
      "loss": 0.6485,
      "step": 815
    },
    {
      "epoch": 1.6734693877551021,
      "grad_norm": 242011.765625,
      "learning_rate": 4.6258503401360546e-06,
      "loss": 0.8242,
      "step": 820
    },
    {
      "epoch": 1.683673469387755,
      "grad_norm": 646366.6875,
      "learning_rate": 4.620181405895692e-06,
      "loss": 0.7616,
      "step": 825
    },
    {
      "epoch": 1.693877551020408,
      "grad_norm": 447861.9375,
      "learning_rate": 4.614512471655329e-06,
      "loss": 0.8694,
      "step": 830
    },
    {
      "epoch": 1.7040816326530612,
      "grad_norm": 422529.3125,
      "learning_rate": 4.608843537414966e-06,
      "loss": 0.9344,
      "step": 835
    },
    {
      "epoch": 1.7142857142857144,
      "grad_norm": 448338.375,
      "learning_rate": 4.603174603174604e-06,
      "loss": 0.8459,
      "step": 840
    },
    {
      "epoch": 1.7244897959183674,
      "grad_norm": 528423.75,
      "learning_rate": 4.59750566893424e-06,
      "loss": 0.8312,
      "step": 845
    },
    {
      "epoch": 1.7346938775510203,
      "grad_norm": 422879.84375,
      "learning_rate": 4.591836734693878e-06,
      "loss": 0.8151,
      "step": 850
    },
    {
      "epoch": 1.7448979591836735,
      "grad_norm": 543066.5,
      "learning_rate": 4.5861678004535145e-06,
      "loss": 0.8013,
      "step": 855
    },
    {
      "epoch": 1.7551020408163265,
      "grad_norm": 362556.6875,
      "learning_rate": 4.580498866213152e-06,
      "loss": 0.9636,
      "step": 860
    },
    {
      "epoch": 1.7653061224489797,
      "grad_norm": 305676.09375,
      "learning_rate": 4.5748299319727895e-06,
      "loss": 0.8208,
      "step": 865
    },
    {
      "epoch": 1.7755102040816326,
      "grad_norm": 477851.96875,
      "learning_rate": 4.569160997732427e-06,
      "loss": 0.8109,
      "step": 870
    },
    {
      "epoch": 1.7857142857142856,
      "grad_norm": 380819.1875,
      "learning_rate": 4.563492063492064e-06,
      "loss": 0.8019,
      "step": 875
    },
    {
      "epoch": 1.7959183673469388,
      "grad_norm": 276121.21875,
      "learning_rate": 4.557823129251701e-06,
      "loss": 0.7365,
      "step": 880
    },
    {
      "epoch": 1.806122448979592,
      "grad_norm": 807004.5625,
      "learning_rate": 4.5521541950113386e-06,
      "loss": 0.8284,
      "step": 885
    },
    {
      "epoch": 1.816326530612245,
      "grad_norm": 612872.5625,
      "learning_rate": 4.546485260770975e-06,
      "loss": 0.9153,
      "step": 890
    },
    {
      "epoch": 1.8265306122448979,
      "grad_norm": 384264.40625,
      "learning_rate": 4.540816326530613e-06,
      "loss": 0.8697,
      "step": 895
    },
    {
      "epoch": 1.836734693877551,
      "grad_norm": 323550.15625,
      "learning_rate": 4.53514739229025e-06,
      "loss": 0.816,
      "step": 900
    },
    {
      "epoch": 1.8469387755102042,
      "grad_norm": 588055.625,
      "learning_rate": 4.529478458049887e-06,
      "loss": 0.9765,
      "step": 905
    },
    {
      "epoch": 1.8571428571428572,
      "grad_norm": 1386326.375,
      "learning_rate": 4.523809523809524e-06,
      "loss": 0.8051,
      "step": 910
    },
    {
      "epoch": 1.8673469387755102,
      "grad_norm": 351753.28125,
      "learning_rate": 4.518140589569162e-06,
      "loss": 0.8377,
      "step": 915
    },
    {
      "epoch": 1.8775510204081631,
      "grad_norm": 547828.25,
      "learning_rate": 4.5124716553287985e-06,
      "loss": 0.6776,
      "step": 920
    },
    {
      "epoch": 1.8877551020408163,
      "grad_norm": 332551.40625,
      "learning_rate": 4.506802721088436e-06,
      "loss": 0.9434,
      "step": 925
    },
    {
      "epoch": 1.8979591836734695,
      "grad_norm": 597800.125,
      "learning_rate": 4.5011337868480735e-06,
      "loss": 0.6165,
      "step": 930
    },
    {
      "epoch": 1.9081632653061225,
      "grad_norm": 598426.1875,
      "learning_rate": 4.49546485260771e-06,
      "loss": 0.9668,
      "step": 935
    },
    {
      "epoch": 1.9183673469387754,
      "grad_norm": 508502.5,
      "learning_rate": 4.489795918367348e-06,
      "loss": 0.8707,
      "step": 940
    },
    {
      "epoch": 1.9285714285714286,
      "grad_norm": 825221.0625,
      "learning_rate": 4.484126984126984e-06,
      "loss": 0.8437,
      "step": 945
    },
    {
      "epoch": 1.9387755102040818,
      "grad_norm": 674555.8125,
      "learning_rate": 4.478458049886622e-06,
      "loss": 0.7054,
      "step": 950
    },
    {
      "epoch": 1.9489795918367347,
      "grad_norm": 547481.0,
      "learning_rate": 4.472789115646259e-06,
      "loss": 0.7896,
      "step": 955
    },
    {
      "epoch": 1.9591836734693877,
      "grad_norm": 869727.6875,
      "learning_rate": 4.467120181405896e-06,
      "loss": 0.8161,
      "step": 960
    },
    {
      "epoch": 1.9693877551020407,
      "grad_norm": 517315.71875,
      "learning_rate": 4.461451247165533e-06,
      "loss": 0.8131,
      "step": 965
    },
    {
      "epoch": 1.9795918367346939,
      "grad_norm": 651501.0625,
      "learning_rate": 4.45578231292517e-06,
      "loss": 0.6217,
      "step": 970
    },
    {
      "epoch": 1.989795918367347,
      "grad_norm": 559789.5625,
      "learning_rate": 4.4501133786848075e-06,
      "loss": 0.7849,
      "step": 975
    },
    {
      "epoch": 2.0,
      "grad_norm": 586872.375,
      "learning_rate": 4.444444444444444e-06,
      "loss": 0.7778,
      "step": 980
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.8779342723004695,
      "eval_loss": 0.5649504661560059,
      "eval_runtime": 182.7428,
      "eval_samples_per_second": 16.318,
      "eval_steps_per_second": 0.684,
      "step": 980
    }
  ],
  "logging_steps": 5,
  "max_steps": 4900,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.99358138673152e+18,
  "train_batch_size": 24,
  "trial_name": null,
  "trial_params": null
}