diff --git "a/trainer_state.json" "b/trainer_state.json" --- "a/trainer_state.json" +++ "b/trainer_state.json" @@ -1,43783 +1,22552 @@ -{ - "best_metric": null, - "best_model_checkpoint": null, - "epoch": 2.0, - "eval_steps": 500, - "global_step": 62500, - "is_hyper_param_search": false, - "is_local_process_zero": true, - "is_world_process_zero": true, - "log_history": [ - { - "epoch": 0.00032, - "grad_norm": 116.36629486083984, - "learning_rate": 3.2e-08, - "loss": 2.146, - "step": 10 - }, - { - "epoch": 0.00064, - "grad_norm": 78.20809173583984, - "learning_rate": 6.4e-08, - "loss": 2.1238, - "step": 20 - }, - { - "epoch": 0.00096, - "grad_norm": 256.7001953125, - "learning_rate": 9.6e-08, - "loss": 2.124, - "step": 30 - }, - { - "epoch": 0.00128, - "grad_norm": 80.30634307861328, - "learning_rate": 1.28e-07, - "loss": 2.1171, - "step": 40 - }, - { - "epoch": 0.0016, - "grad_norm": 130.5838165283203, - "learning_rate": 1.6e-07, - "loss": 2.1067, - "step": 50 - }, - { - "epoch": 0.00192, - "grad_norm": 78.49329376220703, - "learning_rate": 1.92e-07, - "loss": 2.1578, - "step": 60 - }, - { - "epoch": 0.00224, - "grad_norm": 98.15235137939453, - "learning_rate": 2.2400000000000002e-07, - "loss": 2.1194, - "step": 70 - }, - { - "epoch": 0.00256, - "grad_norm": 87.7013931274414, - "learning_rate": 2.56e-07, - "loss": 2.1389, - "step": 80 - }, - { - "epoch": 0.00288, - "grad_norm": 74.4692153930664, - "learning_rate": 2.8800000000000004e-07, - "loss": 2.1174, - "step": 90 - }, - { - "epoch": 0.0032, - "grad_norm": 73.72760009765625, - "learning_rate": 3.2e-07, - "loss": 2.1742, - "step": 100 - }, - { - "epoch": 0.00352, - "grad_norm": 73.00900268554688, - "learning_rate": 3.5200000000000003e-07, - "loss": 2.1119, - "step": 110 - }, - { - "epoch": 0.00384, - "grad_norm": 78.37140655517578, - "learning_rate": 3.84e-07, - "loss": 2.151, - "step": 120 - }, - { - "epoch": 0.00416, - "grad_norm": 99.71389770507812, - "learning_rate": 4.16e-07, - "loss": 2.1146, - "step": 130 - }, - { - "epoch": 0.00448, - "grad_norm": 363.2371826171875, - "learning_rate": 4.4800000000000004e-07, - "loss": 2.1138, - "step": 140 - }, - { - "epoch": 0.0048, - "grad_norm": 86.691650390625, - "learning_rate": 4.800000000000001e-07, - "loss": 2.1319, - "step": 150 - }, - { - "epoch": 0.00512, - "grad_norm": 88.12674713134766, - "learning_rate": 5.12e-07, - "loss": 2.1143, - "step": 160 - }, - { - "epoch": 0.00544, - "grad_norm": 68.71428680419922, - "learning_rate": 5.44e-07, - "loss": 2.1265, - "step": 170 - }, - { - "epoch": 0.00576, - "grad_norm": 65.46075439453125, - "learning_rate": 5.760000000000001e-07, - "loss": 2.1211, - "step": 180 - }, - { - "epoch": 0.00608, - "grad_norm": 78.31707000732422, - "learning_rate": 6.08e-07, - "loss": 2.1096, - "step": 190 - }, - { - "epoch": 0.0064, - "grad_norm": 156.94078063964844, - "learning_rate": 6.4e-07, - "loss": 2.1099, - "step": 200 - }, - { - "epoch": 0.00672, - "grad_norm": 82.27240753173828, - "learning_rate": 6.72e-07, - "loss": 2.0848, - "step": 210 - }, - { - "epoch": 0.00704, - "grad_norm": 82.82190704345703, - "learning_rate": 7.040000000000001e-07, - "loss": 2.108, - "step": 220 - }, - { - "epoch": 0.00736, - "grad_norm": 72.47757720947266, - "learning_rate": 7.36e-07, - "loss": 2.071, - "step": 230 - }, - { - "epoch": 0.00768, - "grad_norm": 79.20008087158203, - "learning_rate": 7.68e-07, - "loss": 2.0826, - "step": 240 - }, - { - "epoch": 0.008, - "grad_norm": 62.04548645019531, - "learning_rate": 8.000000000000001e-07, - "loss": 2.0864, - "step": 250 - }, - { 
- "epoch": 0.00832, - "grad_norm": 71.6567153930664, - "learning_rate": 8.32e-07, - "loss": 2.133, - "step": 260 - }, - { - "epoch": 0.00864, - "grad_norm": 62.64482879638672, - "learning_rate": 8.640000000000001e-07, - "loss": 2.102, - "step": 270 - }, - { - "epoch": 0.00896, - "grad_norm": 75.67320251464844, - "learning_rate": 8.960000000000001e-07, - "loss": 2.0992, - "step": 280 - }, - { - "epoch": 0.00928, - "grad_norm": 63.39164352416992, - "learning_rate": 9.28e-07, - "loss": 2.0856, - "step": 290 - }, - { - "epoch": 0.0096, - "grad_norm": 99.13253021240234, - "learning_rate": 9.600000000000001e-07, - "loss": 2.0637, - "step": 300 - }, - { - "epoch": 0.00992, - "grad_norm": 274.91937255859375, - "learning_rate": 9.92e-07, - "loss": 2.0504, - "step": 310 - }, - { - "epoch": 0.01024, - "grad_norm": 125.71237182617188, - "learning_rate": 1.024e-06, - "loss": 2.079, - "step": 320 - }, - { - "epoch": 0.01056, - "grad_norm": 61.604496002197266, - "learning_rate": 1.0560000000000001e-06, - "loss": 2.0687, - "step": 330 - }, - { - "epoch": 0.01088, - "grad_norm": 88.08313751220703, - "learning_rate": 1.088e-06, - "loss": 2.1214, - "step": 340 - }, - { - "epoch": 0.0112, - "grad_norm": 64.370849609375, - "learning_rate": 1.12e-06, - "loss": 2.082, - "step": 350 - }, - { - "epoch": 0.01152, - "grad_norm": 60.212215423583984, - "learning_rate": 1.1520000000000002e-06, - "loss": 2.0565, - "step": 360 - }, - { - "epoch": 0.01184, - "grad_norm": 130.44395446777344, - "learning_rate": 1.1840000000000002e-06, - "loss": 2.0608, - "step": 370 - }, - { - "epoch": 0.01216, - "grad_norm": 78.3271255493164, - "learning_rate": 1.216e-06, - "loss": 2.0681, - "step": 380 - }, - { - "epoch": 0.01248, - "grad_norm": 64.16400146484375, - "learning_rate": 1.248e-06, - "loss": 2.0408, - "step": 390 - }, - { - "epoch": 0.0128, - "grad_norm": 65.31056213378906, - "learning_rate": 1.28e-06, - "loss": 2.0729, - "step": 400 - }, - { - "epoch": 0.01312, - "grad_norm": 65.5149917602539, - "learning_rate": 1.3120000000000003e-06, - "loss": 2.0688, - "step": 410 - }, - { - "epoch": 0.01344, - "grad_norm": 58.871517181396484, - "learning_rate": 1.344e-06, - "loss": 2.0569, - "step": 420 - }, - { - "epoch": 0.01376, - "grad_norm": 63.139610290527344, - "learning_rate": 1.376e-06, - "loss": 2.099, - "step": 430 - }, - { - "epoch": 0.01408, - "grad_norm": 59.644832611083984, - "learning_rate": 1.4080000000000001e-06, - "loss": 2.0908, - "step": 440 - }, - { - "epoch": 0.0144, - "grad_norm": 67.80413055419922, - "learning_rate": 1.44e-06, - "loss": 2.0677, - "step": 450 - }, - { - "epoch": 0.01472, - "grad_norm": 83.1611328125, - "learning_rate": 1.472e-06, - "loss": 2.0639, - "step": 460 - }, - { - "epoch": 0.01504, - "grad_norm": 60.72829818725586, - "learning_rate": 1.5040000000000001e-06, - "loss": 2.0843, - "step": 470 - }, - { - "epoch": 0.01536, - "grad_norm": 59.140357971191406, - "learning_rate": 1.536e-06, - "loss": 2.0815, - "step": 480 - }, - { - "epoch": 0.01568, - "grad_norm": 62.182960510253906, - "learning_rate": 1.568e-06, - "loss": 2.0586, - "step": 490 - }, - { - "epoch": 0.016, - "grad_norm": 57.94507598876953, - "learning_rate": 1.6000000000000001e-06, - "loss": 2.0707, - "step": 500 - }, - { - "epoch": 0.01632, - "grad_norm": 58.95804977416992, - "learning_rate": 1.6320000000000002e-06, - "loss": 2.0829, - "step": 510 - }, - { - "epoch": 0.01664, - "grad_norm": 60.962276458740234, - "learning_rate": 1.664e-06, - "loss": 2.1051, - "step": 520 - }, - { - "epoch": 0.01696, - "grad_norm": 65.06385803222656, 
- "learning_rate": 1.6960000000000002e-06, - "loss": 2.0823, - "step": 530 - }, - { - "epoch": 0.01728, - "grad_norm": 97.38597869873047, - "learning_rate": 1.7280000000000002e-06, - "loss": 2.0762, - "step": 540 - }, - { - "epoch": 0.0176, - "grad_norm": 57.09391403198242, - "learning_rate": 1.76e-06, - "loss": 2.0419, - "step": 550 - }, - { - "epoch": 0.01792, - "grad_norm": 56.89313507080078, - "learning_rate": 1.7920000000000002e-06, - "loss": 2.0712, - "step": 560 - }, - { - "epoch": 0.01824, - "grad_norm": 59.043785095214844, - "learning_rate": 1.8240000000000002e-06, - "loss": 2.0587, - "step": 570 - }, - { - "epoch": 0.01856, - "grad_norm": 62.550270080566406, - "learning_rate": 1.856e-06, - "loss": 2.054, - "step": 580 - }, - { - "epoch": 0.01888, - "grad_norm": 55.80398178100586, - "learning_rate": 1.8880000000000002e-06, - "loss": 2.0576, - "step": 590 - }, - { - "epoch": 0.0192, - "grad_norm": 96.00469207763672, - "learning_rate": 1.9200000000000003e-06, - "loss": 2.0298, - "step": 600 - }, - { - "epoch": 0.01952, - "grad_norm": 57.13787841796875, - "learning_rate": 1.952e-06, - "loss": 2.0663, - "step": 610 - }, - { - "epoch": 0.01984, - "grad_norm": 77.44837951660156, - "learning_rate": 1.984e-06, - "loss": 2.0384, - "step": 620 - }, - { - "epoch": 0.02016, - "grad_norm": 55.39677429199219, - "learning_rate": 2.0160000000000003e-06, - "loss": 2.0735, - "step": 630 - }, - { - "epoch": 0.02048, - "grad_norm": 56.57113265991211, - "learning_rate": 2.048e-06, - "loss": 2.0319, - "step": 640 - }, - { - "epoch": 0.0208, - "grad_norm": 57.18354034423828, - "learning_rate": 2.08e-06, - "loss": 2.0575, - "step": 650 - }, - { - "epoch": 0.02112, - "grad_norm": 58.37870788574219, - "learning_rate": 2.1120000000000003e-06, - "loss": 2.069, - "step": 660 - }, - { - "epoch": 0.02144, - "grad_norm": 65.55963897705078, - "learning_rate": 2.144e-06, - "loss": 2.0331, - "step": 670 - }, - { - "epoch": 0.02176, - "grad_norm": 57.383968353271484, - "learning_rate": 2.176e-06, - "loss": 2.0384, - "step": 680 - }, - { - "epoch": 0.02208, - "grad_norm": 57.483943939208984, - "learning_rate": 2.2080000000000003e-06, - "loss": 2.0409, - "step": 690 - }, - { - "epoch": 0.0224, - "grad_norm": 59.38597869873047, - "learning_rate": 2.24e-06, - "loss": 2.0512, - "step": 700 - }, - { - "epoch": 0.02272, - "grad_norm": 57.73863220214844, - "learning_rate": 2.2720000000000004e-06, - "loss": 2.0403, - "step": 710 - }, - { - "epoch": 0.02304, - "grad_norm": 63.45584487915039, - "learning_rate": 2.3040000000000003e-06, - "loss": 2.0434, - "step": 720 - }, - { - "epoch": 0.02336, - "grad_norm": 100.67032623291016, - "learning_rate": 2.336e-06, - "loss": 2.0526, - "step": 730 - }, - { - "epoch": 0.02368, - "grad_norm": 57.207584381103516, - "learning_rate": 2.3680000000000005e-06, - "loss": 2.0304, - "step": 740 - }, - { - "epoch": 0.024, - "grad_norm": 56.702999114990234, - "learning_rate": 2.4000000000000003e-06, - "loss": 2.0786, - "step": 750 - }, - { - "epoch": 0.02432, - "grad_norm": 63.29140090942383, - "learning_rate": 2.432e-06, - "loss": 2.0715, - "step": 760 - }, - { - "epoch": 0.02464, - "grad_norm": 59.268550872802734, - "learning_rate": 2.4640000000000005e-06, - "loss": 2.0632, - "step": 770 - }, - { - "epoch": 0.02496, - "grad_norm": 55.43419647216797, - "learning_rate": 2.496e-06, - "loss": 2.0311, - "step": 780 - }, - { - "epoch": 0.02528, - "grad_norm": 55.27999496459961, - "learning_rate": 2.5280000000000006e-06, - "loss": 2.046, - "step": 790 - }, - { - "epoch": 0.0256, - "grad_norm": 
57.40989685058594, - "learning_rate": 2.56e-06, - "loss": 2.0419, - "step": 800 - }, - { - "epoch": 0.02592, - "grad_norm": 67.16029357910156, - "learning_rate": 2.592e-06, - "loss": 2.03, - "step": 810 - }, - { - "epoch": 0.02624, - "grad_norm": 57.05144500732422, - "learning_rate": 2.6240000000000006e-06, - "loss": 2.0313, - "step": 820 - }, - { - "epoch": 0.02656, - "grad_norm": 65.95780944824219, - "learning_rate": 2.656e-06, - "loss": 2.0233, - "step": 830 - }, - { - "epoch": 0.02688, - "grad_norm": 57.77890396118164, - "learning_rate": 2.688e-06, - "loss": 1.993, - "step": 840 - }, - { - "epoch": 0.0272, - "grad_norm": 55.54998779296875, - "learning_rate": 2.7200000000000002e-06, - "loss": 2.0289, - "step": 850 - }, - { - "epoch": 0.02752, - "grad_norm": 55.96319580078125, - "learning_rate": 2.752e-06, - "loss": 2.0452, - "step": 860 - }, - { - "epoch": 0.02784, - "grad_norm": 55.67866897583008, - "learning_rate": 2.784e-06, - "loss": 2.0498, - "step": 870 - }, - { - "epoch": 0.02816, - "grad_norm": 55.35938262939453, - "learning_rate": 2.8160000000000002e-06, - "loss": 2.013, - "step": 880 - }, - { - "epoch": 0.02848, - "grad_norm": 55.288124084472656, - "learning_rate": 2.848e-06, - "loss": 2.0345, - "step": 890 - }, - { - "epoch": 0.0288, - "grad_norm": 56.8425178527832, - "learning_rate": 2.88e-06, - "loss": 2.0455, - "step": 900 - }, - { - "epoch": 0.02912, - "grad_norm": 58.22956848144531, - "learning_rate": 2.9120000000000002e-06, - "loss": 2.038, - "step": 910 - }, - { - "epoch": 0.02944, - "grad_norm": 54.9834098815918, - "learning_rate": 2.944e-06, - "loss": 2.0466, - "step": 920 - }, - { - "epoch": 0.02976, - "grad_norm": 66.05599212646484, - "learning_rate": 2.976e-06, - "loss": 2.0497, - "step": 930 - }, - { - "epoch": 0.03008, - "grad_norm": 57.55447006225586, - "learning_rate": 3.0080000000000003e-06, - "loss": 2.0273, - "step": 940 - }, - { - "epoch": 0.0304, - "grad_norm": 54.68487548828125, - "learning_rate": 3.04e-06, - "loss": 2.0367, - "step": 950 - }, - { - "epoch": 0.03072, - "grad_norm": 55.63594436645508, - "learning_rate": 3.072e-06, - "loss": 2.0618, - "step": 960 - }, - { - "epoch": 0.03104, - "grad_norm": 54.52610778808594, - "learning_rate": 3.1040000000000003e-06, - "loss": 2.0254, - "step": 970 - }, - { - "epoch": 0.03136, - "grad_norm": 55.98470687866211, - "learning_rate": 3.136e-06, - "loss": 1.9674, - "step": 980 - }, - { - "epoch": 0.03168, - "grad_norm": 53.209693908691406, - "learning_rate": 3.1680000000000004e-06, - "loss": 2.0201, - "step": 990 - }, - { - "epoch": 0.032, - "grad_norm": 57.08290100097656, - "learning_rate": 3.2000000000000003e-06, - "loss": 2.0513, - "step": 1000 - }, - { - "epoch": 0.03232, - "grad_norm": 53.92963409423828, - "learning_rate": 3.232e-06, - "loss": 2.0478, - "step": 1010 - }, - { - "epoch": 0.03264, - "grad_norm": 54.785301208496094, - "learning_rate": 3.2640000000000004e-06, - "loss": 2.0312, - "step": 1020 - }, - { - "epoch": 0.03296, - "grad_norm": 56.84122848510742, - "learning_rate": 3.2960000000000003e-06, - "loss": 2.036, - "step": 1030 - }, - { - "epoch": 0.03328, - "grad_norm": 58.097774505615234, - "learning_rate": 3.328e-06, - "loss": 2.0265, - "step": 1040 - }, - { - "epoch": 0.0336, - "grad_norm": 59.78932571411133, - "learning_rate": 3.3600000000000004e-06, - "loss": 2.0173, - "step": 1050 - }, - { - "epoch": 0.03392, - "grad_norm": 53.78879928588867, - "learning_rate": 3.3920000000000003e-06, - "loss": 2.0493, - "step": 1060 - }, - { - "epoch": 0.03424, - "grad_norm": 58.1050910949707, - 
"learning_rate": 3.424e-06, - "loss": 2.0251, - "step": 1070 - }, - { - "epoch": 0.03456, - "grad_norm": 54.9379768371582, - "learning_rate": 3.4560000000000005e-06, - "loss": 2.029, - "step": 1080 - }, - { - "epoch": 0.03488, - "grad_norm": 57.901493072509766, - "learning_rate": 3.4880000000000003e-06, - "loss": 1.9813, - "step": 1090 - }, - { - "epoch": 0.0352, - "grad_norm": 55.3337516784668, - "learning_rate": 3.52e-06, - "loss": 2.0027, - "step": 1100 - }, - { - "epoch": 0.03552, - "grad_norm": 57.643306732177734, - "learning_rate": 3.5520000000000005e-06, - "loss": 2.0194, - "step": 1110 - }, - { - "epoch": 0.03584, - "grad_norm": 56.8505744934082, - "learning_rate": 3.5840000000000003e-06, - "loss": 2.014, - "step": 1120 - }, - { - "epoch": 0.03616, - "grad_norm": 54.06476593017578, - "learning_rate": 3.616e-06, - "loss": 2.0378, - "step": 1130 - }, - { - "epoch": 0.03648, - "grad_norm": 57.77007293701172, - "learning_rate": 3.6480000000000005e-06, - "loss": 2.0199, - "step": 1140 - }, - { - "epoch": 0.0368, - "grad_norm": 57.76017379760742, - "learning_rate": 3.6800000000000003e-06, - "loss": 2.0117, - "step": 1150 - }, - { - "epoch": 0.03712, - "grad_norm": 55.163944244384766, - "learning_rate": 3.712e-06, - "loss": 2.0031, - "step": 1160 - }, - { - "epoch": 0.03744, - "grad_norm": 55.953636169433594, - "learning_rate": 3.7440000000000005e-06, - "loss": 1.9844, - "step": 1170 - }, - { - "epoch": 0.03776, - "grad_norm": 54.14808654785156, - "learning_rate": 3.7760000000000004e-06, - "loss": 2.0015, - "step": 1180 - }, - { - "epoch": 0.03808, - "grad_norm": 55.77479553222656, - "learning_rate": 3.8080000000000006e-06, - "loss": 2.0269, - "step": 1190 - }, - { - "epoch": 0.0384, - "grad_norm": 58.67176055908203, - "learning_rate": 3.8400000000000005e-06, - "loss": 2.0185, - "step": 1200 - }, - { - "epoch": 0.03872, - "grad_norm": 54.49531555175781, - "learning_rate": 3.872e-06, - "loss": 2.0179, - "step": 1210 - }, - { - "epoch": 0.03904, - "grad_norm": 56.10469436645508, - "learning_rate": 3.904e-06, - "loss": 1.9929, - "step": 1220 - }, - { - "epoch": 0.03936, - "grad_norm": 53.95834732055664, - "learning_rate": 3.936e-06, - "loss": 2.03, - "step": 1230 - }, - { - "epoch": 0.03968, - "grad_norm": 102.673095703125, - "learning_rate": 3.968e-06, - "loss": 2.0318, - "step": 1240 - }, - { - "epoch": 0.04, - "grad_norm": 54.45889663696289, - "learning_rate": 4.000000000000001e-06, - "loss": 1.9909, - "step": 1250 - }, - { - "epoch": 0.04032, - "grad_norm": 55.20240020751953, - "learning_rate": 4.0320000000000005e-06, - "loss": 2.0459, - "step": 1260 - }, - { - "epoch": 0.04064, - "grad_norm": 56.24109649658203, - "learning_rate": 4.064e-06, - "loss": 2.0029, - "step": 1270 - }, - { - "epoch": 0.04096, - "grad_norm": 56.00358963012695, - "learning_rate": 4.096e-06, - "loss": 1.9921, - "step": 1280 - }, - { - "epoch": 0.04128, - "grad_norm": 59.99430847167969, - "learning_rate": 4.128e-06, - "loss": 1.975, - "step": 1290 - }, - { - "epoch": 0.0416, - "grad_norm": 56.52634811401367, - "learning_rate": 4.16e-06, - "loss": 1.9956, - "step": 1300 - }, - { - "epoch": 0.04192, - "grad_norm": 53.13933181762695, - "learning_rate": 4.192000000000001e-06, - "loss": 2.0033, - "step": 1310 - }, - { - "epoch": 0.04224, - "grad_norm": 54.04265594482422, - "learning_rate": 4.2240000000000006e-06, - "loss": 2.0051, - "step": 1320 - }, - { - "epoch": 0.04256, - "grad_norm": 52.66888427734375, - "learning_rate": 4.256e-06, - "loss": 1.9798, - "step": 1330 - }, - { - "epoch": 0.04288, - "grad_norm": 
53.12565994262695, - "learning_rate": 4.288e-06, - "loss": 1.9773, - "step": 1340 - }, - { - "epoch": 0.0432, - "grad_norm": 55.532291412353516, - "learning_rate": 4.32e-06, - "loss": 1.996, - "step": 1350 - }, - { - "epoch": 0.04352, - "grad_norm": 55.148712158203125, - "learning_rate": 4.352e-06, - "loss": 2.0011, - "step": 1360 - }, - { - "epoch": 0.04384, - "grad_norm": 55.9796257019043, - "learning_rate": 4.384000000000001e-06, - "loss": 1.9802, - "step": 1370 - }, - { - "epoch": 0.04416, - "grad_norm": 55.14250564575195, - "learning_rate": 4.416000000000001e-06, - "loss": 2.0021, - "step": 1380 - }, - { - "epoch": 0.04448, - "grad_norm": 57.55263137817383, - "learning_rate": 4.4480000000000004e-06, - "loss": 1.9682, - "step": 1390 - }, - { - "epoch": 0.0448, - "grad_norm": 56.29933547973633, - "learning_rate": 4.48e-06, - "loss": 1.9691, - "step": 1400 - }, - { - "epoch": 0.04512, - "grad_norm": 56.33018493652344, - "learning_rate": 4.512e-06, - "loss": 1.9934, - "step": 1410 - }, - { - "epoch": 0.04544, - "grad_norm": 56.64614486694336, - "learning_rate": 4.544000000000001e-06, - "loss": 2.0083, - "step": 1420 - }, - { - "epoch": 0.04576, - "grad_norm": 57.90870666503906, - "learning_rate": 4.576000000000001e-06, - "loss": 2.0079, - "step": 1430 - }, - { - "epoch": 0.04608, - "grad_norm": 53.97960662841797, - "learning_rate": 4.608000000000001e-06, - "loss": 2.019, - "step": 1440 - }, - { - "epoch": 0.0464, - "grad_norm": 54.856109619140625, - "learning_rate": 4.6400000000000005e-06, - "loss": 2.0008, - "step": 1450 - }, - { - "epoch": 0.04672, - "grad_norm": 54.7287483215332, - "learning_rate": 4.672e-06, - "loss": 1.9936, - "step": 1460 - }, - { - "epoch": 0.04704, - "grad_norm": 54.871925354003906, - "learning_rate": 4.704e-06, - "loss": 2.0316, - "step": 1470 - }, - { - "epoch": 0.04736, - "grad_norm": 57.25019073486328, - "learning_rate": 4.736000000000001e-06, - "loss": 1.9713, - "step": 1480 - }, - { - "epoch": 0.04768, - "grad_norm": 53.96601867675781, - "learning_rate": 4.768000000000001e-06, - "loss": 1.9888, - "step": 1490 - }, - { - "epoch": 0.048, - "grad_norm": 54.54352569580078, - "learning_rate": 4.800000000000001e-06, - "loss": 1.9706, - "step": 1500 - }, - { - "epoch": 0.04832, - "grad_norm": 56.38828659057617, - "learning_rate": 4.8320000000000005e-06, - "loss": 1.9915, - "step": 1510 - }, - { - "epoch": 0.04864, - "grad_norm": 56.09176254272461, - "learning_rate": 4.864e-06, - "loss": 1.9934, - "step": 1520 - }, - { - "epoch": 0.04896, - "grad_norm": 53.20353317260742, - "learning_rate": 4.896e-06, - "loss": 1.9913, - "step": 1530 - }, - { - "epoch": 0.04928, - "grad_norm": 55.36820983886719, - "learning_rate": 4.928000000000001e-06, - "loss": 1.9903, - "step": 1540 - }, - { - "epoch": 0.0496, - "grad_norm": 53.72861862182617, - "learning_rate": 4.960000000000001e-06, - "loss": 1.992, - "step": 1550 - }, - { - "epoch": 0.04992, - "grad_norm": 56.07704162597656, - "learning_rate": 4.992e-06, - "loss": 1.9861, - "step": 1560 - }, - { - "epoch": 0.05024, - "grad_norm": 54.132179260253906, - "learning_rate": 5.024e-06, - "loss": 2.0059, - "step": 1570 - }, - { - "epoch": 0.05056, - "grad_norm": 53.731773376464844, - "learning_rate": 5.056000000000001e-06, - "loss": 1.9821, - "step": 1580 - }, - { - "epoch": 0.05088, - "grad_norm": 54.18653869628906, - "learning_rate": 5.088000000000001e-06, - "loss": 1.981, - "step": 1590 - }, - { - "epoch": 0.0512, - "grad_norm": 55.18617248535156, - "learning_rate": 5.12e-06, - "loss": 2.0078, - "step": 1600 - }, - { - "epoch": 
0.05152, - "grad_norm": 54.86219787597656, - "learning_rate": 5.152e-06, - "loss": 1.9672, - "step": 1610 - }, - { - "epoch": 0.05184, - "grad_norm": 55.19453048706055, - "learning_rate": 5.184e-06, - "loss": 1.9968, - "step": 1620 - }, - { - "epoch": 0.05216, - "grad_norm": 53.446937561035156, - "learning_rate": 5.216e-06, - "loss": 1.9704, - "step": 1630 - }, - { - "epoch": 0.05248, - "grad_norm": 54.388919830322266, - "learning_rate": 5.248000000000001e-06, - "loss": 1.9644, - "step": 1640 - }, - { - "epoch": 0.0528, - "grad_norm": 53.366363525390625, - "learning_rate": 5.28e-06, - "loss": 2.016, - "step": 1650 - }, - { - "epoch": 0.05312, - "grad_norm": 54.797367095947266, - "learning_rate": 5.312e-06, - "loss": 1.9977, - "step": 1660 - }, - { - "epoch": 0.05344, - "grad_norm": 55.27174377441406, - "learning_rate": 5.344e-06, - "loss": 1.9998, - "step": 1670 - }, - { - "epoch": 0.05376, - "grad_norm": 53.69790267944336, - "learning_rate": 5.376e-06, - "loss": 2.0032, - "step": 1680 - }, - { - "epoch": 0.05408, - "grad_norm": 54.89072799682617, - "learning_rate": 5.408e-06, - "loss": 1.9904, - "step": 1690 - }, - { - "epoch": 0.0544, - "grad_norm": 55.35121536254883, - "learning_rate": 5.4400000000000004e-06, - "loss": 1.9592, - "step": 1700 - }, - { - "epoch": 0.05472, - "grad_norm": 52.87653350830078, - "learning_rate": 5.472e-06, - "loss": 1.9727, - "step": 1710 - }, - { - "epoch": 0.05504, - "grad_norm": 55.05913543701172, - "learning_rate": 5.504e-06, - "loss": 1.9658, - "step": 1720 - }, - { - "epoch": 0.05536, - "grad_norm": 57.34429168701172, - "learning_rate": 5.536e-06, - "loss": 2.0091, - "step": 1730 - }, - { - "epoch": 0.05568, - "grad_norm": 55.97663497924805, - "learning_rate": 5.568e-06, - "loss": 1.9778, - "step": 1740 - }, - { - "epoch": 0.056, - "grad_norm": 55.70100021362305, - "learning_rate": 5.600000000000001e-06, - "loss": 1.9616, - "step": 1750 - }, - { - "epoch": 0.05632, - "grad_norm": 58.278560638427734, - "learning_rate": 5.6320000000000005e-06, - "loss": 1.9674, - "step": 1760 - }, - { - "epoch": 0.05664, - "grad_norm": 57.26869583129883, - "learning_rate": 5.664e-06, - "loss": 1.9691, - "step": 1770 - }, - { - "epoch": 0.05696, - "grad_norm": 54.13764572143555, - "learning_rate": 5.696e-06, - "loss": 1.9874, - "step": 1780 - }, - { - "epoch": 0.05728, - "grad_norm": 51.36130142211914, - "learning_rate": 5.728e-06, - "loss": 1.9858, - "step": 1790 - }, - { - "epoch": 0.0576, - "grad_norm": 55.706329345703125, - "learning_rate": 5.76e-06, - "loss": 1.9472, - "step": 1800 - }, - { - "epoch": 0.05792, - "grad_norm": 53.5456428527832, - "learning_rate": 5.792000000000001e-06, - "loss": 1.9674, - "step": 1810 - }, - { - "epoch": 0.05824, - "grad_norm": 54.37445068359375, - "learning_rate": 5.8240000000000005e-06, - "loss": 1.9978, - "step": 1820 - }, - { - "epoch": 0.05856, - "grad_norm": 54.93683624267578, - "learning_rate": 5.856e-06, - "loss": 1.9733, - "step": 1830 - }, - { - "epoch": 0.05888, - "grad_norm": 54.61006164550781, - "learning_rate": 5.888e-06, - "loss": 1.9629, - "step": 1840 - }, - { - "epoch": 0.0592, - "grad_norm": 55.023983001708984, - "learning_rate": 5.92e-06, - "loss": 2.0033, - "step": 1850 - }, - { - "epoch": 0.05952, - "grad_norm": 53.465049743652344, - "learning_rate": 5.952e-06, - "loss": 1.989, - "step": 1860 - }, - { - "epoch": 0.05984, - "grad_norm": 56.193817138671875, - "learning_rate": 5.984000000000001e-06, - "loss": 1.9739, - "step": 1870 - }, - { - "epoch": 0.06016, - "grad_norm": 54.541046142578125, - "learning_rate": 
6.0160000000000005e-06, - "loss": 1.9517, - "step": 1880 - }, - { - "epoch": 0.06048, - "grad_norm": 56.678497314453125, - "learning_rate": 6.048e-06, - "loss": 1.9855, - "step": 1890 - }, - { - "epoch": 0.0608, - "grad_norm": 55.969120025634766, - "learning_rate": 6.08e-06, - "loss": 2.007, - "step": 1900 - }, - { - "epoch": 0.06112, - "grad_norm": 57.037635803222656, - "learning_rate": 6.112e-06, - "loss": 1.97, - "step": 1910 - }, - { - "epoch": 0.06144, - "grad_norm": 54.40221405029297, - "learning_rate": 6.144e-06, - "loss": 1.9743, - "step": 1920 - }, - { - "epoch": 0.06176, - "grad_norm": 53.17253494262695, - "learning_rate": 6.176000000000001e-06, - "loss": 1.9731, - "step": 1930 - }, - { - "epoch": 0.06208, - "grad_norm": 54.895198822021484, - "learning_rate": 6.2080000000000005e-06, - "loss": 1.9538, - "step": 1940 - }, - { - "epoch": 0.0624, - "grad_norm": 53.09823226928711, - "learning_rate": 6.24e-06, - "loss": 1.9552, - "step": 1950 - }, - { - "epoch": 0.06272, - "grad_norm": 56.002220153808594, - "learning_rate": 6.272e-06, - "loss": 1.9599, - "step": 1960 - }, - { - "epoch": 0.06304, - "grad_norm": 55.639400482177734, - "learning_rate": 6.304e-06, - "loss": 1.9609, - "step": 1970 - }, - { - "epoch": 0.06336, - "grad_norm": 52.89950942993164, - "learning_rate": 6.336000000000001e-06, - "loss": 1.9171, - "step": 1980 - }, - { - "epoch": 0.06368, - "grad_norm": 53.86906814575195, - "learning_rate": 6.368000000000001e-06, - "loss": 1.97, - "step": 1990 - }, - { - "epoch": 0.064, - "grad_norm": 55.6588134765625, - "learning_rate": 6.4000000000000006e-06, - "loss": 1.9761, - "step": 2000 - }, - { - "epoch": 0.06432, - "grad_norm": 53.31937026977539, - "learning_rate": 6.432e-06, - "loss": 1.9875, - "step": 2010 - }, - { - "epoch": 0.06464, - "grad_norm": 54.0553092956543, - "learning_rate": 6.464e-06, - "loss": 1.9506, - "step": 2020 - }, - { - "epoch": 0.06496, - "grad_norm": 55.71330261230469, - "learning_rate": 6.496e-06, - "loss": 1.9804, - "step": 2030 - }, - { - "epoch": 0.06528, - "grad_norm": 55.74639129638672, - "learning_rate": 6.528000000000001e-06, - "loss": 1.9477, - "step": 2040 - }, - { - "epoch": 0.0656, - "grad_norm": 52.34907913208008, - "learning_rate": 6.560000000000001e-06, - "loss": 1.9649, - "step": 2050 - }, - { - "epoch": 0.06592, - "grad_norm": 56.01731872558594, - "learning_rate": 6.592000000000001e-06, - "loss": 1.9424, - "step": 2060 - }, - { - "epoch": 0.06624, - "grad_norm": 54.96754455566406, - "learning_rate": 6.6240000000000004e-06, - "loss": 1.9472, - "step": 2070 - }, - { - "epoch": 0.06656, - "grad_norm": 54.93800735473633, - "learning_rate": 6.656e-06, - "loss": 1.9315, - "step": 2080 - }, - { - "epoch": 0.06688, - "grad_norm": 53.28120040893555, - "learning_rate": 6.688e-06, - "loss": 1.9172, - "step": 2090 - }, - { - "epoch": 0.0672, - "grad_norm": 57.81128692626953, - "learning_rate": 6.720000000000001e-06, - "loss": 1.9547, - "step": 2100 - }, - { - "epoch": 0.06752, - "grad_norm": 55.10950469970703, - "learning_rate": 6.752000000000001e-06, - "loss": 1.9546, - "step": 2110 - }, - { - "epoch": 0.06784, - "grad_norm": 57.18091583251953, - "learning_rate": 6.784000000000001e-06, - "loss": 1.9474, - "step": 2120 - }, - { - "epoch": 0.06816, - "grad_norm": 55.841949462890625, - "learning_rate": 6.8160000000000005e-06, - "loss": 1.9447, - "step": 2130 - }, - { - "epoch": 0.06848, - "grad_norm": 56.35560989379883, - "learning_rate": 6.848e-06, - "loss": 1.9712, - "step": 2140 - }, - { - "epoch": 0.0688, - "grad_norm": 53.2947883605957, - 
"learning_rate": 6.88e-06, - "loss": 1.9627, - "step": 2150 - }, - { - "epoch": 0.06912, - "grad_norm": 54.127017974853516, - "learning_rate": 6.912000000000001e-06, - "loss": 1.9434, - "step": 2160 - }, - { - "epoch": 0.06944, - "grad_norm": 55.26138687133789, - "learning_rate": 6.944000000000001e-06, - "loss": 1.9383, - "step": 2170 - }, - { - "epoch": 0.06976, - "grad_norm": 53.60810852050781, - "learning_rate": 6.976000000000001e-06, - "loss": 1.9546, - "step": 2180 - }, - { - "epoch": 0.07008, - "grad_norm": 59.719818115234375, - "learning_rate": 7.0080000000000005e-06, - "loss": 1.942, - "step": 2190 - }, - { - "epoch": 0.0704, - "grad_norm": 57.145912170410156, - "learning_rate": 7.04e-06, - "loss": 1.9615, - "step": 2200 - }, - { - "epoch": 0.07072, - "grad_norm": 54.45525360107422, - "learning_rate": 7.072000000000001e-06, - "loss": 1.985, - "step": 2210 - }, - { - "epoch": 0.07104, - "grad_norm": 54.832584381103516, - "learning_rate": 7.104000000000001e-06, - "loss": 1.962, - "step": 2220 - }, - { - "epoch": 0.07136, - "grad_norm": 51.92244338989258, - "learning_rate": 7.136000000000001e-06, - "loss": 1.9542, - "step": 2230 - }, - { - "epoch": 0.07168, - "grad_norm": 56.1202392578125, - "learning_rate": 7.168000000000001e-06, - "loss": 1.9922, - "step": 2240 - }, - { - "epoch": 0.072, - "grad_norm": 54.495853424072266, - "learning_rate": 7.2000000000000005e-06, - "loss": 1.9604, - "step": 2250 - }, - { - "epoch": 0.07232, - "grad_norm": 54.13912582397461, - "learning_rate": 7.232e-06, - "loss": 1.9325, - "step": 2260 - }, - { - "epoch": 0.07264, - "grad_norm": 54.35029220581055, - "learning_rate": 7.264000000000001e-06, - "loss": 1.9923, - "step": 2270 - }, - { - "epoch": 0.07296, - "grad_norm": 54.93330764770508, - "learning_rate": 7.296000000000001e-06, - "loss": 1.9505, - "step": 2280 - }, - { - "epoch": 0.07328, - "grad_norm": 57.12767028808594, - "learning_rate": 7.328000000000001e-06, - "loss": 1.914, - "step": 2290 - }, - { - "epoch": 0.0736, - "grad_norm": 55.84728240966797, - "learning_rate": 7.360000000000001e-06, - "loss": 1.9528, - "step": 2300 - }, - { - "epoch": 0.07392, - "grad_norm": 55.132205963134766, - "learning_rate": 7.3920000000000005e-06, - "loss": 1.9449, - "step": 2310 - }, - { - "epoch": 0.07424, - "grad_norm": 56.098594665527344, - "learning_rate": 7.424e-06, - "loss": 1.9802, - "step": 2320 - }, - { - "epoch": 0.07456, - "grad_norm": 56.53871154785156, - "learning_rate": 7.456000000000001e-06, - "loss": 1.9791, - "step": 2330 - }, - { - "epoch": 0.07488, - "grad_norm": 52.63264465332031, - "learning_rate": 7.488000000000001e-06, - "loss": 1.9377, - "step": 2340 - }, - { - "epoch": 0.0752, - "grad_norm": 56.09365463256836, - "learning_rate": 7.520000000000001e-06, - "loss": 1.9626, - "step": 2350 - }, - { - "epoch": 0.07552, - "grad_norm": 51.870357513427734, - "learning_rate": 7.552000000000001e-06, - "loss": 1.973, - "step": 2360 - }, - { - "epoch": 0.07584, - "grad_norm": 54.86323165893555, - "learning_rate": 7.5840000000000006e-06, - "loss": 1.9508, - "step": 2370 - }, - { - "epoch": 0.07616, - "grad_norm": 52.37346649169922, - "learning_rate": 7.616000000000001e-06, - "loss": 1.9633, - "step": 2380 - }, - { - "epoch": 0.07648, - "grad_norm": 53.62883758544922, - "learning_rate": 7.648e-06, - "loss": 1.92, - "step": 2390 - }, - { - "epoch": 0.0768, - "grad_norm": 53.85102462768555, - "learning_rate": 7.680000000000001e-06, - "loss": 1.9566, - "step": 2400 - }, - { - "epoch": 0.07712, - "grad_norm": 52.15278244018555, - "learning_rate": 7.712e-06, - 
"loss": 1.9368, - "step": 2410 - }, - { - "epoch": 0.07744, - "grad_norm": 54.85605239868164, - "learning_rate": 7.744e-06, - "loss": 1.9585, - "step": 2420 - }, - { - "epoch": 0.07776, - "grad_norm": 56.958553314208984, - "learning_rate": 7.776e-06, - "loss": 1.9739, - "step": 2430 - }, - { - "epoch": 0.07808, - "grad_norm": 51.912452697753906, - "learning_rate": 7.808e-06, - "loss": 1.97, - "step": 2440 - }, - { - "epoch": 0.0784, - "grad_norm": 54.37236022949219, - "learning_rate": 7.840000000000001e-06, - "loss": 1.976, - "step": 2450 - }, - { - "epoch": 0.07872, - "grad_norm": 54.4852409362793, - "learning_rate": 7.872e-06, - "loss": 1.9596, - "step": 2460 - }, - { - "epoch": 0.07904, - "grad_norm": 55.548343658447266, - "learning_rate": 7.904000000000001e-06, - "loss": 1.9475, - "step": 2470 - }, - { - "epoch": 0.07936, - "grad_norm": 53.80973434448242, - "learning_rate": 7.936e-06, - "loss": 1.9215, - "step": 2480 - }, - { - "epoch": 0.07968, - "grad_norm": 54.02414321899414, - "learning_rate": 7.968e-06, - "loss": 1.9115, - "step": 2490 - }, - { - "epoch": 0.08, - "grad_norm": 54.69728088378906, - "learning_rate": 8.000000000000001e-06, - "loss": 1.9142, - "step": 2500 - }, - { - "epoch": 0.08032, - "grad_norm": 54.138126373291016, - "learning_rate": 8.032e-06, - "loss": 1.9299, - "step": 2510 - }, - { - "epoch": 0.08064, - "grad_norm": 55.51025390625, - "learning_rate": 8.064000000000001e-06, - "loss": 1.9347, - "step": 2520 - }, - { - "epoch": 0.08096, - "grad_norm": 51.047943115234375, - "learning_rate": 8.096e-06, - "loss": 1.9486, - "step": 2530 - }, - { - "epoch": 0.08128, - "grad_norm": 56.506046295166016, - "learning_rate": 8.128e-06, - "loss": 1.9777, - "step": 2540 - }, - { - "epoch": 0.0816, - "grad_norm": 55.341896057128906, - "learning_rate": 8.16e-06, - "loss": 1.953, - "step": 2550 - }, - { - "epoch": 0.08192, - "grad_norm": 52.932979583740234, - "learning_rate": 8.192e-06, - "loss": 1.9175, - "step": 2560 - }, - { - "epoch": 0.08224, - "grad_norm": 53.52081298828125, - "learning_rate": 8.224000000000001e-06, - "loss": 1.966, - "step": 2570 - }, - { - "epoch": 0.08256, - "grad_norm": 51.49814987182617, - "learning_rate": 8.256e-06, - "loss": 1.9507, - "step": 2580 - }, - { - "epoch": 0.08288, - "grad_norm": 57.984745025634766, - "learning_rate": 8.288000000000001e-06, - "loss": 1.9256, - "step": 2590 - }, - { - "epoch": 0.0832, - "grad_norm": 53.47091293334961, - "learning_rate": 8.32e-06, - "loss": 1.9209, - "step": 2600 - }, - { - "epoch": 0.08352, - "grad_norm": 55.28806686401367, - "learning_rate": 8.352e-06, - "loss": 1.9572, - "step": 2610 - }, - { - "epoch": 0.08384, - "grad_norm": 53.81025314331055, - "learning_rate": 8.384000000000001e-06, - "loss": 1.9052, - "step": 2620 - }, - { - "epoch": 0.08416, - "grad_norm": 54.805519104003906, - "learning_rate": 8.416e-06, - "loss": 1.9068, - "step": 2630 - }, - { - "epoch": 0.08448, - "grad_norm": 55.12998580932617, - "learning_rate": 8.448000000000001e-06, - "loss": 1.9165, - "step": 2640 - }, - { - "epoch": 0.0848, - "grad_norm": 54.45305633544922, - "learning_rate": 8.48e-06, - "loss": 1.9044, - "step": 2650 - }, - { - "epoch": 0.08512, - "grad_norm": 52.86287307739258, - "learning_rate": 8.512e-06, - "loss": 1.9557, - "step": 2660 - }, - { - "epoch": 0.08544, - "grad_norm": 54.07919692993164, - "learning_rate": 8.544000000000002e-06, - "loss": 1.9357, - "step": 2670 - }, - { - "epoch": 0.08576, - "grad_norm": 53.222190856933594, - "learning_rate": 8.576e-06, - "loss": 1.9608, - "step": 2680 - }, - { - "epoch": 
0.08608, - "grad_norm": 53.212642669677734, - "learning_rate": 8.608000000000001e-06, - "loss": 1.9576, - "step": 2690 - }, - { - "epoch": 0.0864, - "grad_norm": 54.31929016113281, - "learning_rate": 8.64e-06, - "loss": 1.9355, - "step": 2700 - }, - { - "epoch": 0.08672, - "grad_norm": 54.23030090332031, - "learning_rate": 8.672000000000001e-06, - "loss": 1.9085, - "step": 2710 - }, - { - "epoch": 0.08704, - "grad_norm": 54.162254333496094, - "learning_rate": 8.704e-06, - "loss": 1.9607, - "step": 2720 - }, - { - "epoch": 0.08736, - "grad_norm": 52.76661682128906, - "learning_rate": 8.736e-06, - "loss": 1.9672, - "step": 2730 - }, - { - "epoch": 0.08768, - "grad_norm": 55.04710006713867, - "learning_rate": 8.768000000000001e-06, - "loss": 1.9271, - "step": 2740 - }, - { - "epoch": 0.088, - "grad_norm": 50.674922943115234, - "learning_rate": 8.8e-06, - "loss": 1.9289, - "step": 2750 - }, - { - "epoch": 0.08832, - "grad_norm": 53.99641799926758, - "learning_rate": 8.832000000000001e-06, - "loss": 1.9284, - "step": 2760 - }, - { - "epoch": 0.08864, - "grad_norm": 54.7790412902832, - "learning_rate": 8.864e-06, - "loss": 1.9531, - "step": 2770 - }, - { - "epoch": 0.08896, - "grad_norm": 55.55375671386719, - "learning_rate": 8.896000000000001e-06, - "loss": 1.9442, - "step": 2780 - }, - { - "epoch": 0.08928, - "grad_norm": 54.156951904296875, - "learning_rate": 8.928000000000002e-06, - "loss": 1.9336, - "step": 2790 - }, - { - "epoch": 0.0896, - "grad_norm": 55.16888427734375, - "learning_rate": 8.96e-06, - "loss": 1.9637, - "step": 2800 - }, - { - "epoch": 0.08992, - "grad_norm": 53.6899528503418, - "learning_rate": 8.992000000000001e-06, - "loss": 1.9319, - "step": 2810 - }, - { - "epoch": 0.09024, - "grad_norm": 53.920284271240234, - "learning_rate": 9.024e-06, - "loss": 1.9264, - "step": 2820 - }, - { - "epoch": 0.09056, - "grad_norm": 56.052589416503906, - "learning_rate": 9.056000000000001e-06, - "loss": 1.9338, - "step": 2830 - }, - { - "epoch": 0.09088, - "grad_norm": 54.092559814453125, - "learning_rate": 9.088000000000002e-06, - "loss": 1.9374, - "step": 2840 - }, - { - "epoch": 0.0912, - "grad_norm": 54.96030044555664, - "learning_rate": 9.12e-06, - "loss": 1.9035, - "step": 2850 - }, - { - "epoch": 0.09152, - "grad_norm": 54.033260345458984, - "learning_rate": 9.152000000000001e-06, - "loss": 1.9501, - "step": 2860 - }, - { - "epoch": 0.09184, - "grad_norm": 56.32831954956055, - "learning_rate": 9.184e-06, - "loss": 1.9292, - "step": 2870 - }, - { - "epoch": 0.09216, - "grad_norm": 56.31962966918945, - "learning_rate": 9.216000000000001e-06, - "loss": 1.934, - "step": 2880 - }, - { - "epoch": 0.09248, - "grad_norm": 52.693443298339844, - "learning_rate": 9.248e-06, - "loss": 1.9015, - "step": 2890 - }, - { - "epoch": 0.0928, - "grad_norm": 58.64433670043945, - "learning_rate": 9.280000000000001e-06, - "loss": 1.9302, - "step": 2900 - }, - { - "epoch": 0.09312, - "grad_norm": 54.63075256347656, - "learning_rate": 9.312000000000002e-06, - "loss": 1.9186, - "step": 2910 - }, - { - "epoch": 0.09344, - "grad_norm": 55.12821578979492, - "learning_rate": 9.344e-06, - "loss": 1.9432, - "step": 2920 - }, - { - "epoch": 0.09376, - "grad_norm": 53.73576736450195, - "learning_rate": 9.376000000000001e-06, - "loss": 1.9453, - "step": 2930 - }, - { - "epoch": 0.09408, - "grad_norm": 54.59904098510742, - "learning_rate": 9.408e-06, - "loss": 1.9144, - "step": 2940 - }, - { - "epoch": 0.0944, - "grad_norm": 56.60734558105469, - "learning_rate": 9.440000000000001e-06, - "loss": 1.92, - "step": 2950 
- }, - { - "epoch": 0.09472, - "grad_norm": 54.28753662109375, - "learning_rate": 9.472000000000002e-06, - "loss": 1.9247, - "step": 2960 - }, - { - "epoch": 0.09504, - "grad_norm": 54.17729568481445, - "learning_rate": 9.504e-06, - "loss": 1.9287, - "step": 2970 - }, - { - "epoch": 0.09536, - "grad_norm": 51.31298828125, - "learning_rate": 9.536000000000002e-06, - "loss": 1.9311, - "step": 2980 - }, - { - "epoch": 0.09568, - "grad_norm": 51.89619827270508, - "learning_rate": 9.568e-06, - "loss": 1.9267, - "step": 2990 - }, - { - "epoch": 0.096, - "grad_norm": 53.17412567138672, - "learning_rate": 9.600000000000001e-06, - "loss": 1.9588, - "step": 3000 - }, - { - "epoch": 0.09632, - "grad_norm": 54.527095794677734, - "learning_rate": 9.632e-06, - "loss": 1.9124, - "step": 3010 - }, - { - "epoch": 0.09664, - "grad_norm": 53.29350280761719, - "learning_rate": 9.664000000000001e-06, - "loss": 1.9428, - "step": 3020 - }, - { - "epoch": 0.09696, - "grad_norm": 52.03528594970703, - "learning_rate": 9.696000000000002e-06, - "loss": 1.9349, - "step": 3030 - }, - { - "epoch": 0.09728, - "grad_norm": 52.16074752807617, - "learning_rate": 9.728e-06, - "loss": 1.8923, - "step": 3040 - }, - { - "epoch": 0.0976, - "grad_norm": 54.195716857910156, - "learning_rate": 9.760000000000001e-06, - "loss": 1.948, - "step": 3050 - }, - { - "epoch": 0.09792, - "grad_norm": 53.85103988647461, - "learning_rate": 9.792e-06, - "loss": 1.9211, - "step": 3060 - }, - { - "epoch": 0.09824, - "grad_norm": 52.96208190917969, - "learning_rate": 9.824000000000001e-06, - "loss": 1.9633, - "step": 3070 - }, - { - "epoch": 0.09856, - "grad_norm": 54.462432861328125, - "learning_rate": 9.856000000000002e-06, - "loss": 1.8881, - "step": 3080 - }, - { - "epoch": 0.09888, - "grad_norm": 52.74999237060547, - "learning_rate": 9.888000000000001e-06, - "loss": 1.944, - "step": 3090 - }, - { - "epoch": 0.0992, - "grad_norm": 56.4122428894043, - "learning_rate": 9.920000000000002e-06, - "loss": 1.9364, - "step": 3100 - }, - { - "epoch": 0.09952, - "grad_norm": 51.049678802490234, - "learning_rate": 9.952e-06, - "loss": 1.9211, - "step": 3110 - }, - { - "epoch": 0.09984, - "grad_norm": 52.30768966674805, - "learning_rate": 9.984e-06, - "loss": 1.9375, - "step": 3120 - }, - { - "epoch": 0.10016, - "grad_norm": 54.005855560302734, - "learning_rate": 1.0016000000000002e-05, - "loss": 1.9178, - "step": 3130 - }, - { - "epoch": 0.10048, - "grad_norm": 55.89468765258789, - "learning_rate": 1.0048e-05, - "loss": 1.9192, - "step": 3140 - }, - { - "epoch": 0.1008, - "grad_norm": 52.7632942199707, - "learning_rate": 1.008e-05, - "loss": 1.9249, - "step": 3150 - }, - { - "epoch": 0.10112, - "grad_norm": 54.614105224609375, - "learning_rate": 1.0112000000000002e-05, - "loss": 1.9498, - "step": 3160 - }, - { - "epoch": 0.10144, - "grad_norm": 53.3930549621582, - "learning_rate": 1.0144e-05, - "loss": 1.9389, - "step": 3170 - }, - { - "epoch": 0.10176, - "grad_norm": 52.193302154541016, - "learning_rate": 1.0176000000000002e-05, - "loss": 1.9007, - "step": 3180 - }, - { - "epoch": 0.10208, - "grad_norm": 52.78200912475586, - "learning_rate": 1.0208e-05, - "loss": 1.9244, - "step": 3190 - }, - { - "epoch": 0.1024, - "grad_norm": 56.6796875, - "learning_rate": 1.024e-05, - "loss": 1.9249, - "step": 3200 - }, - { - "epoch": 0.10272, - "grad_norm": 52.93400573730469, - "learning_rate": 1.0272e-05, - "loss": 1.901, - "step": 3210 - }, - { - "epoch": 0.10304, - "grad_norm": 52.60874557495117, - "learning_rate": 1.0304e-05, - "loss": 1.8986, - "step": 3220 - 
}, - { - "epoch": 0.10336, - "grad_norm": 54.57966232299805, - "learning_rate": 1.0336000000000002e-05, - "loss": 1.9102, - "step": 3230 - }, - { - "epoch": 0.10368, - "grad_norm": 52.31039810180664, - "learning_rate": 1.0368e-05, - "loss": 1.9536, - "step": 3240 - }, - { - "epoch": 0.104, - "grad_norm": 53.94814682006836, - "learning_rate": 1.04e-05, - "loss": 1.9062, - "step": 3250 - }, - { - "epoch": 0.10432, - "grad_norm": 54.85002517700195, - "learning_rate": 1.0432e-05, - "loss": 1.9327, - "step": 3260 - }, - { - "epoch": 0.10464, - "grad_norm": 54.65288543701172, - "learning_rate": 1.0464e-05, - "loss": 1.9333, - "step": 3270 - }, - { - "epoch": 0.10496, - "grad_norm": 53.56013488769531, - "learning_rate": 1.0496000000000003e-05, - "loss": 1.9172, - "step": 3280 - }, - { - "epoch": 0.10528, - "grad_norm": 51.968772888183594, - "learning_rate": 1.0528e-05, - "loss": 1.9291, - "step": 3290 - }, - { - "epoch": 0.1056, - "grad_norm": 53.27824020385742, - "learning_rate": 1.056e-05, - "loss": 1.9172, - "step": 3300 - }, - { - "epoch": 0.10592, - "grad_norm": 52.052886962890625, - "learning_rate": 1.0592e-05, - "loss": 1.9086, - "step": 3310 - }, - { - "epoch": 0.10624, - "grad_norm": 57.092655181884766, - "learning_rate": 1.0624e-05, - "loss": 1.9064, - "step": 3320 - }, - { - "epoch": 0.10656, - "grad_norm": 53.775962829589844, - "learning_rate": 1.0656000000000003e-05, - "loss": 1.9032, - "step": 3330 - }, - { - "epoch": 0.10688, - "grad_norm": 54.14551544189453, - "learning_rate": 1.0688e-05, - "loss": 1.8882, - "step": 3340 - }, - { - "epoch": 0.1072, - "grad_norm": 51.43288040161133, - "learning_rate": 1.072e-05, - "loss": 1.9218, - "step": 3350 - }, - { - "epoch": 0.10752, - "grad_norm": 53.97608947753906, - "learning_rate": 1.0752e-05, - "loss": 1.9089, - "step": 3360 - }, - { - "epoch": 0.10784, - "grad_norm": 54.1445426940918, - "learning_rate": 1.0784e-05, - "loss": 1.9201, - "step": 3370 - }, - { - "epoch": 0.10816, - "grad_norm": 52.34402084350586, - "learning_rate": 1.0816e-05, - "loss": 1.921, - "step": 3380 - }, - { - "epoch": 0.10848, - "grad_norm": 53.012149810791016, - "learning_rate": 1.0848e-05, - "loss": 1.8994, - "step": 3390 - }, - { - "epoch": 0.1088, - "grad_norm": 51.83297348022461, - "learning_rate": 1.0880000000000001e-05, - "loss": 1.9445, - "step": 3400 - }, - { - "epoch": 0.10912, - "grad_norm": 54.20074462890625, - "learning_rate": 1.0912e-05, - "loss": 1.9188, - "step": 3410 - }, - { - "epoch": 0.10944, - "grad_norm": 53.536834716796875, - "learning_rate": 1.0944e-05, - "loss": 1.9129, - "step": 3420 - }, - { - "epoch": 0.10976, - "grad_norm": 55.08799362182617, - "learning_rate": 1.0976e-05, - "loss": 1.9033, - "step": 3430 - }, - { - "epoch": 0.11008, - "grad_norm": 53.32230758666992, - "learning_rate": 1.1008e-05, - "loss": 1.8888, - "step": 3440 - }, - { - "epoch": 0.1104, - "grad_norm": 51.482086181640625, - "learning_rate": 1.1040000000000001e-05, - "loss": 1.929, - "step": 3450 - }, - { - "epoch": 0.11072, - "grad_norm": 52.34898376464844, - "learning_rate": 1.1072e-05, - "loss": 1.9445, - "step": 3460 - }, - { - "epoch": 0.11104, - "grad_norm": 51.944236755371094, - "learning_rate": 1.1104e-05, - "loss": 1.9294, - "step": 3470 - }, - { - "epoch": 0.11136, - "grad_norm": 53.45691680908203, - "learning_rate": 1.1136e-05, - "loss": 1.9127, - "step": 3480 - }, - { - "epoch": 0.11168, - "grad_norm": 53.34093475341797, - "learning_rate": 1.1168e-05, - "loss": 1.9039, - "step": 3490 - }, - { - "epoch": 0.112, - "grad_norm": 53.048362731933594, - 
"learning_rate": 1.1200000000000001e-05, - "loss": 1.9287, - "step": 3500 - }, - { - "epoch": 0.11232, - "grad_norm": 52.95180130004883, - "learning_rate": 1.1232e-05, - "loss": 1.9277, - "step": 3510 - }, - { - "epoch": 0.11264, - "grad_norm": 51.409751892089844, - "learning_rate": 1.1264000000000001e-05, - "loss": 1.9043, - "step": 3520 - }, - { - "epoch": 0.11296, - "grad_norm": 53.68730163574219, - "learning_rate": 1.1296e-05, - "loss": 1.9118, - "step": 3530 - }, - { - "epoch": 0.11328, - "grad_norm": 54.60579299926758, - "learning_rate": 1.1328e-05, - "loss": 1.9177, - "step": 3540 - }, - { - "epoch": 0.1136, - "grad_norm": 52.81605529785156, - "learning_rate": 1.136e-05, - "loss": 1.899, - "step": 3550 - }, - { - "epoch": 0.11392, - "grad_norm": 52.89885711669922, - "learning_rate": 1.1392e-05, - "loss": 1.8706, - "step": 3560 - }, - { - "epoch": 0.11424, - "grad_norm": 53.58485794067383, - "learning_rate": 1.1424000000000001e-05, - "loss": 1.8861, - "step": 3570 - }, - { - "epoch": 0.11456, - "grad_norm": 52.32903289794922, - "learning_rate": 1.1456e-05, - "loss": 1.9004, - "step": 3580 - }, - { - "epoch": 0.11488, - "grad_norm": 51.63499450683594, - "learning_rate": 1.1488e-05, - "loss": 1.891, - "step": 3590 - }, - { - "epoch": 0.1152, - "grad_norm": 56.98868942260742, - "learning_rate": 1.152e-05, - "loss": 1.8828, - "step": 3600 - }, - { - "epoch": 0.11552, - "grad_norm": 52.2146110534668, - "learning_rate": 1.1552e-05, - "loss": 1.9317, - "step": 3610 - }, - { - "epoch": 0.11584, - "grad_norm": 53.047584533691406, - "learning_rate": 1.1584000000000001e-05, - "loss": 1.9252, - "step": 3620 - }, - { - "epoch": 0.11616, - "grad_norm": 54.36164093017578, - "learning_rate": 1.1616e-05, - "loss": 1.9031, - "step": 3630 - }, - { - "epoch": 0.11648, - "grad_norm": 51.535457611083984, - "learning_rate": 1.1648000000000001e-05, - "loss": 1.8883, - "step": 3640 - }, - { - "epoch": 0.1168, - "grad_norm": 51.89107131958008, - "learning_rate": 1.168e-05, - "loss": 1.9308, - "step": 3650 - }, - { - "epoch": 0.11712, - "grad_norm": 52.61616897583008, - "learning_rate": 1.1712e-05, - "loss": 1.904, - "step": 3660 - }, - { - "epoch": 0.11744, - "grad_norm": 54.508609771728516, - "learning_rate": 1.1744000000000001e-05, - "loss": 1.8981, - "step": 3670 - }, - { - "epoch": 0.11776, - "grad_norm": 52.905399322509766, - "learning_rate": 1.1776e-05, - "loss": 1.8918, - "step": 3680 - }, - { - "epoch": 0.11808, - "grad_norm": 54.308448791503906, - "learning_rate": 1.1808000000000001e-05, - "loss": 1.8762, - "step": 3690 - }, - { - "epoch": 0.1184, - "grad_norm": 50.295318603515625, - "learning_rate": 1.184e-05, - "loss": 1.9246, - "step": 3700 - }, - { - "epoch": 0.11872, - "grad_norm": 55.23350524902344, - "learning_rate": 1.1872000000000001e-05, - "loss": 1.9205, - "step": 3710 - }, - { - "epoch": 0.11904, - "grad_norm": 52.099143981933594, - "learning_rate": 1.1904e-05, - "loss": 1.9082, - "step": 3720 - }, - { - "epoch": 0.11936, - "grad_norm": 52.80199432373047, - "learning_rate": 1.1936e-05, - "loss": 1.8809, - "step": 3730 - }, - { - "epoch": 0.11968, - "grad_norm": 50.771202087402344, - "learning_rate": 1.1968000000000001e-05, - "loss": 1.892, - "step": 3740 - }, - { - "epoch": 0.12, - "grad_norm": 52.11357879638672, - "learning_rate": 1.2e-05, - "loss": 1.9049, - "step": 3750 - }, - { - "epoch": 0.12032, - "grad_norm": 53.48591613769531, - "learning_rate": 1.2032000000000001e-05, - "loss": 1.874, - "step": 3760 - }, - { - "epoch": 0.12064, - "grad_norm": 52.847015380859375, - 
"learning_rate": 1.2064e-05, - "loss": 1.8921, - "step": 3770 - }, - { - "epoch": 0.12096, - "grad_norm": 53.891414642333984, - "learning_rate": 1.2096e-05, - "loss": 1.9263, - "step": 3780 - }, - { - "epoch": 0.12128, - "grad_norm": 53.7008171081543, - "learning_rate": 1.2128000000000001e-05, - "loss": 1.8968, - "step": 3790 - }, - { - "epoch": 0.1216, - "grad_norm": 56.37105178833008, - "learning_rate": 1.216e-05, - "loss": 1.8804, - "step": 3800 - }, - { - "epoch": 0.12192, - "grad_norm": 49.93663024902344, - "learning_rate": 1.2192000000000001e-05, - "loss": 1.8962, - "step": 3810 - }, - { - "epoch": 0.12224, - "grad_norm": 53.451927185058594, - "learning_rate": 1.2224e-05, - "loss": 1.8972, - "step": 3820 - }, - { - "epoch": 0.12256, - "grad_norm": 51.564979553222656, - "learning_rate": 1.2256000000000001e-05, - "loss": 1.8889, - "step": 3830 - }, - { - "epoch": 0.12288, - "grad_norm": 54.76313400268555, - "learning_rate": 1.2288e-05, - "loss": 1.8864, - "step": 3840 - }, - { - "epoch": 0.1232, - "grad_norm": 52.7327880859375, - "learning_rate": 1.232e-05, - "loss": 1.9118, - "step": 3850 - }, - { - "epoch": 0.12352, - "grad_norm": 54.4197883605957, - "learning_rate": 1.2352000000000001e-05, - "loss": 1.8859, - "step": 3860 - }, - { - "epoch": 0.12384, - "grad_norm": 52.1079216003418, - "learning_rate": 1.2384e-05, - "loss": 1.919, - "step": 3870 - }, - { - "epoch": 0.12416, - "grad_norm": 51.248836517333984, - "learning_rate": 1.2416000000000001e-05, - "loss": 1.911, - "step": 3880 - }, - { - "epoch": 0.12448, - "grad_norm": 52.00767517089844, - "learning_rate": 1.2448e-05, - "loss": 1.8667, - "step": 3890 - }, - { - "epoch": 0.1248, - "grad_norm": 53.36152267456055, - "learning_rate": 1.248e-05, - "loss": 1.8689, - "step": 3900 - }, - { - "epoch": 0.12512, - "grad_norm": 54.70954895019531, - "learning_rate": 1.2512000000000002e-05, - "loss": 1.9175, - "step": 3910 - }, - { - "epoch": 0.12544, - "grad_norm": 53.92921829223633, - "learning_rate": 1.2544e-05, - "loss": 1.9274, - "step": 3920 - }, - { - "epoch": 0.12576, - "grad_norm": 53.22060012817383, - "learning_rate": 1.2576000000000001e-05, - "loss": 1.8985, - "step": 3930 - }, - { - "epoch": 0.12608, - "grad_norm": 53.11907958984375, - "learning_rate": 1.2608e-05, - "loss": 1.9053, - "step": 3940 - }, - { - "epoch": 0.1264, - "grad_norm": 56.41827392578125, - "learning_rate": 1.2640000000000001e-05, - "loss": 1.9134, - "step": 3950 - }, - { - "epoch": 0.12672, - "grad_norm": 53.10688018798828, - "learning_rate": 1.2672000000000002e-05, - "loss": 1.8679, - "step": 3960 - }, - { - "epoch": 0.12704, - "grad_norm": 54.76327133178711, - "learning_rate": 1.2704e-05, - "loss": 1.88, - "step": 3970 - }, - { - "epoch": 0.12736, - "grad_norm": 51.2487907409668, - "learning_rate": 1.2736000000000001e-05, - "loss": 1.8733, - "step": 3980 - }, - { - "epoch": 0.12768, - "grad_norm": 54.9959602355957, - "learning_rate": 1.2768e-05, - "loss": 1.9171, - "step": 3990 - }, - { - "epoch": 0.128, - "grad_norm": 51.75206756591797, - "learning_rate": 1.2800000000000001e-05, - "loss": 1.9109, - "step": 4000 - }, - { - "epoch": 0.12832, - "grad_norm": 52.249847412109375, - "learning_rate": 1.2832e-05, - "loss": 1.8478, - "step": 4010 - }, - { - "epoch": 0.12864, - "grad_norm": 51.156700134277344, - "learning_rate": 1.2864e-05, - "loss": 1.8795, - "step": 4020 - }, - { - "epoch": 0.12896, - "grad_norm": 52.112060546875, - "learning_rate": 1.2896000000000002e-05, - "loss": 1.8716, - "step": 4030 - }, - { - "epoch": 0.12928, - "grad_norm": 
53.839229583740234, - "learning_rate": 1.2928e-05, - "loss": 1.9113, - "step": 4040 - }, - { - "epoch": 0.1296, - "grad_norm": 50.280059814453125, - "learning_rate": 1.2960000000000001e-05, - "loss": 1.9032, - "step": 4050 - }, - { - "epoch": 0.12992, - "grad_norm": 55.25138473510742, - "learning_rate": 1.2992e-05, - "loss": 1.9209, - "step": 4060 - }, - { - "epoch": 0.13024, - "grad_norm": 52.41676330566406, - "learning_rate": 1.3024000000000001e-05, - "loss": 1.8936, - "step": 4070 - }, - { - "epoch": 0.13056, - "grad_norm": 53.2076416015625, - "learning_rate": 1.3056000000000002e-05, - "loss": 1.917, - "step": 4080 - }, - { - "epoch": 0.13088, - "grad_norm": 53.43117141723633, - "learning_rate": 1.3088e-05, - "loss": 1.8803, - "step": 4090 - }, - { - "epoch": 0.1312, - "grad_norm": 50.76637268066406, - "learning_rate": 1.3120000000000001e-05, - "loss": 1.9023, - "step": 4100 - }, - { - "epoch": 0.13152, - "grad_norm": 51.317291259765625, - "learning_rate": 1.3152e-05, - "loss": 1.9109, - "step": 4110 - }, - { - "epoch": 0.13184, - "grad_norm": 52.48469924926758, - "learning_rate": 1.3184000000000001e-05, - "loss": 1.8672, - "step": 4120 - }, - { - "epoch": 0.13216, - "grad_norm": 52.752708435058594, - "learning_rate": 1.3216000000000002e-05, - "loss": 1.9122, - "step": 4130 - }, - { - "epoch": 0.13248, - "grad_norm": 53.35075378417969, - "learning_rate": 1.3248000000000001e-05, - "loss": 1.888, - "step": 4140 - }, - { - "epoch": 0.1328, - "grad_norm": 54.060489654541016, - "learning_rate": 1.3280000000000002e-05, - "loss": 1.8922, - "step": 4150 - }, - { - "epoch": 0.13312, - "grad_norm": 50.84089660644531, - "learning_rate": 1.3312e-05, - "loss": 1.8884, - "step": 4160 - }, - { - "epoch": 0.13344, - "grad_norm": 52.881195068359375, - "learning_rate": 1.3344000000000001e-05, - "loss": 1.8952, - "step": 4170 - }, - { - "epoch": 0.13376, - "grad_norm": 52.58907699584961, - "learning_rate": 1.3376e-05, - "loss": 1.8752, - "step": 4180 - }, - { - "epoch": 0.13408, - "grad_norm": 53.9188346862793, - "learning_rate": 1.3408000000000001e-05, - "loss": 1.8764, - "step": 4190 - }, - { - "epoch": 0.1344, - "grad_norm": 51.16621780395508, - "learning_rate": 1.3440000000000002e-05, - "loss": 1.867, - "step": 4200 - }, - { - "epoch": 0.13472, - "grad_norm": 48.84093475341797, - "learning_rate": 1.3472e-05, - "loss": 1.8732, - "step": 4210 - }, - { - "epoch": 0.13504, - "grad_norm": 52.254451751708984, - "learning_rate": 1.3504000000000001e-05, - "loss": 1.9032, - "step": 4220 - }, - { - "epoch": 0.13536, - "grad_norm": 54.642391204833984, - "learning_rate": 1.3536e-05, - "loss": 1.8666, - "step": 4230 - }, - { - "epoch": 0.13568, - "grad_norm": 53.06638717651367, - "learning_rate": 1.3568000000000001e-05, - "loss": 1.8792, - "step": 4240 - }, - { - "epoch": 0.136, - "grad_norm": 52.53422927856445, - "learning_rate": 1.3600000000000002e-05, - "loss": 1.889, - "step": 4250 - }, - { - "epoch": 0.13632, - "grad_norm": 53.5844612121582, - "learning_rate": 1.3632000000000001e-05, - "loss": 1.8908, - "step": 4260 - }, - { - "epoch": 0.13664, - "grad_norm": 52.1329345703125, - "learning_rate": 1.3664000000000002e-05, - "loss": 1.8925, - "step": 4270 - }, - { - "epoch": 0.13696, - "grad_norm": 52.22524642944336, - "learning_rate": 1.3696e-05, - "loss": 1.8654, - "step": 4280 - }, - { - "epoch": 0.13728, - "grad_norm": 53.75664138793945, - "learning_rate": 1.3728000000000001e-05, - "loss": 1.8558, - "step": 4290 - }, - { - "epoch": 0.1376, - "grad_norm": 55.64548110961914, - "learning_rate": 1.376e-05, - 
"loss": 1.878, - "step": 4300 - }, - { - "epoch": 0.13792, - "grad_norm": 53.45896911621094, - "learning_rate": 1.3792000000000001e-05, - "loss": 1.8806, - "step": 4310 - }, - { - "epoch": 0.13824, - "grad_norm": 52.175533294677734, - "learning_rate": 1.3824000000000002e-05, - "loss": 1.8848, - "step": 4320 - }, - { - "epoch": 0.13856, - "grad_norm": 50.984527587890625, - "learning_rate": 1.3856e-05, - "loss": 1.8641, - "step": 4330 - }, - { - "epoch": 0.13888, - "grad_norm": 54.03857421875, - "learning_rate": 1.3888000000000002e-05, - "loss": 1.8907, - "step": 4340 - }, - { - "epoch": 0.1392, - "grad_norm": 51.122833251953125, - "learning_rate": 1.392e-05, - "loss": 1.9, - "step": 4350 - }, - { - "epoch": 0.13952, - "grad_norm": 49.632469177246094, - "learning_rate": 1.3952000000000001e-05, - "loss": 1.8702, - "step": 4360 - }, - { - "epoch": 0.13984, - "grad_norm": 51.28369140625, - "learning_rate": 1.3984000000000002e-05, - "loss": 1.8763, - "step": 4370 - }, - { - "epoch": 0.14016, - "grad_norm": 50.211151123046875, - "learning_rate": 1.4016000000000001e-05, - "loss": 1.85, - "step": 4380 - }, - { - "epoch": 0.14048, - "grad_norm": 51.59481430053711, - "learning_rate": 1.4048000000000002e-05, - "loss": 1.8771, - "step": 4390 - }, - { - "epoch": 0.1408, - "grad_norm": 52.014102935791016, - "learning_rate": 1.408e-05, - "loss": 1.872, - "step": 4400 - }, - { - "epoch": 0.14112, - "grad_norm": 55.16132354736328, - "learning_rate": 1.4112000000000001e-05, - "loss": 1.8373, - "step": 4410 - }, - { - "epoch": 0.14144, - "grad_norm": 54.09187316894531, - "learning_rate": 1.4144000000000002e-05, - "loss": 1.8705, - "step": 4420 - }, - { - "epoch": 0.14176, - "grad_norm": 52.59539031982422, - "learning_rate": 1.4176000000000001e-05, - "loss": 1.8561, - "step": 4430 - }, - { - "epoch": 0.14208, - "grad_norm": 49.13827133178711, - "learning_rate": 1.4208000000000002e-05, - "loss": 1.8817, - "step": 4440 - }, - { - "epoch": 0.1424, - "grad_norm": 51.36490249633789, - "learning_rate": 1.4240000000000001e-05, - "loss": 1.8702, - "step": 4450 - }, - { - "epoch": 0.14272, - "grad_norm": 50.50772476196289, - "learning_rate": 1.4272000000000002e-05, - "loss": 1.8779, - "step": 4460 - }, - { - "epoch": 0.14304, - "grad_norm": 53.97172546386719, - "learning_rate": 1.4304e-05, - "loss": 1.9014, - "step": 4470 - }, - { - "epoch": 0.14336, - "grad_norm": 52.78620910644531, - "learning_rate": 1.4336000000000001e-05, - "loss": 1.9061, - "step": 4480 - }, - { - "epoch": 0.14368, - "grad_norm": 51.83744430541992, - "learning_rate": 1.4368000000000002e-05, - "loss": 1.904, - "step": 4490 - }, - { - "epoch": 0.144, - "grad_norm": 52.39995193481445, - "learning_rate": 1.4400000000000001e-05, - "loss": 1.8681, - "step": 4500 - }, - { - "epoch": 0.14432, - "grad_norm": 54.1072998046875, - "learning_rate": 1.4432000000000002e-05, - "loss": 1.9006, - "step": 4510 - }, - { - "epoch": 0.14464, - "grad_norm": 50.79708480834961, - "learning_rate": 1.4464e-05, - "loss": 1.8475, - "step": 4520 - }, - { - "epoch": 0.14496, - "grad_norm": 52.832576751708984, - "learning_rate": 1.4496000000000001e-05, - "loss": 1.8898, - "step": 4530 - }, - { - "epoch": 0.14528, - "grad_norm": 52.59731674194336, - "learning_rate": 1.4528000000000002e-05, - "loss": 1.8694, - "step": 4540 - }, - { - "epoch": 0.1456, - "grad_norm": 53.897300720214844, - "learning_rate": 1.4560000000000001e-05, - "loss": 1.8701, - "step": 4550 - }, - { - "epoch": 0.14592, - "grad_norm": 51.246253967285156, - "learning_rate": 1.4592000000000002e-05, - "loss": 
1.8392, - "step": 4560 - }, - { - "epoch": 0.14624, - "grad_norm": 51.34325408935547, - "learning_rate": 1.4624000000000001e-05, - "loss": 1.8767, - "step": 4570 - }, - { - "epoch": 0.14656, - "grad_norm": 52.48518371582031, - "learning_rate": 1.4656000000000002e-05, - "loss": 1.8632, - "step": 4580 - }, - { - "epoch": 0.14688, - "grad_norm": 52.77693557739258, - "learning_rate": 1.4688000000000002e-05, - "loss": 1.855, - "step": 4590 - }, - { - "epoch": 0.1472, - "grad_norm": 51.66023635864258, - "learning_rate": 1.4720000000000001e-05, - "loss": 1.888, - "step": 4600 - }, - { - "epoch": 0.14752, - "grad_norm": 52.59580612182617, - "learning_rate": 1.4752000000000002e-05, - "loss": 1.8731, - "step": 4610 - }, - { - "epoch": 0.14784, - "grad_norm": 50.908451080322266, - "learning_rate": 1.4784000000000001e-05, - "loss": 1.9071, - "step": 4620 - }, - { - "epoch": 0.14816, - "grad_norm": 54.34564208984375, - "learning_rate": 1.4816000000000002e-05, - "loss": 1.8867, - "step": 4630 - }, - { - "epoch": 0.14848, - "grad_norm": 52.151248931884766, - "learning_rate": 1.4848e-05, - "loss": 1.8917, - "step": 4640 - }, - { - "epoch": 0.1488, - "grad_norm": 51.90532684326172, - "learning_rate": 1.4880000000000002e-05, - "loss": 1.8505, - "step": 4650 - }, - { - "epoch": 0.14912, - "grad_norm": 53.09203338623047, - "learning_rate": 1.4912000000000002e-05, - "loss": 1.8405, - "step": 4660 - }, - { - "epoch": 0.14944, - "grad_norm": 50.767364501953125, - "learning_rate": 1.4944000000000001e-05, - "loss": 1.8498, - "step": 4670 - }, - { - "epoch": 0.14976, - "grad_norm": 55.40914535522461, - "learning_rate": 1.4976000000000002e-05, - "loss": 1.8578, - "step": 4680 - }, - { - "epoch": 0.15008, - "grad_norm": 50.39368438720703, - "learning_rate": 1.5008000000000001e-05, - "loss": 1.8433, - "step": 4690 - }, - { - "epoch": 0.1504, - "grad_norm": 50.388824462890625, - "learning_rate": 1.5040000000000002e-05, - "loss": 1.8765, - "step": 4700 - }, - { - "epoch": 0.15072, - "grad_norm": 52.96123504638672, - "learning_rate": 1.5072000000000002e-05, - "loss": 1.8703, - "step": 4710 - }, - { - "epoch": 0.15104, - "grad_norm": 54.47455978393555, - "learning_rate": 1.5104000000000001e-05, - "loss": 1.9063, - "step": 4720 - }, - { - "epoch": 0.15136, - "grad_norm": 53.61328887939453, - "learning_rate": 1.5136000000000002e-05, - "loss": 1.8588, - "step": 4730 - }, - { - "epoch": 0.15168, - "grad_norm": 53.73160934448242, - "learning_rate": 1.5168000000000001e-05, - "loss": 1.8677, - "step": 4740 - }, - { - "epoch": 0.152, - "grad_norm": 52.09059524536133, - "learning_rate": 1.5200000000000002e-05, - "loss": 1.8523, - "step": 4750 - }, - { - "epoch": 0.15232, - "grad_norm": 51.840579986572266, - "learning_rate": 1.5232000000000003e-05, - "loss": 1.8375, - "step": 4760 - }, - { - "epoch": 0.15264, - "grad_norm": 51.018516540527344, - "learning_rate": 1.5264e-05, - "loss": 1.8733, - "step": 4770 - }, - { - "epoch": 0.15296, - "grad_norm": 51.18205261230469, - "learning_rate": 1.5296e-05, - "loss": 1.8648, - "step": 4780 - }, - { - "epoch": 0.15328, - "grad_norm": 52.53403091430664, - "learning_rate": 1.5328e-05, - "loss": 1.8639, - "step": 4790 - }, - { - "epoch": 0.1536, - "grad_norm": 49.89756774902344, - "learning_rate": 1.5360000000000002e-05, - "loss": 1.8552, - "step": 4800 - }, - { - "epoch": 0.15392, - "grad_norm": 50.81180191040039, - "learning_rate": 1.5392e-05, - "loss": 1.8875, - "step": 4810 - }, - { - "epoch": 0.15424, - "grad_norm": 50.676231384277344, - "learning_rate": 1.5424e-05, - "loss": 1.865, - 
"step": 4820 - }, - { - "epoch": 0.15456, - "grad_norm": 51.839927673339844, - "learning_rate": 1.5456000000000002e-05, - "loss": 1.8921, - "step": 4830 - }, - { - "epoch": 0.15488, - "grad_norm": 53.45131301879883, - "learning_rate": 1.5488e-05, - "loss": 1.8541, - "step": 4840 - }, - { - "epoch": 0.1552, - "grad_norm": 51.4936637878418, - "learning_rate": 1.552e-05, - "loss": 1.878, - "step": 4850 - }, - { - "epoch": 0.15552, - "grad_norm": 51.89486312866211, - "learning_rate": 1.5552e-05, - "loss": 1.8731, - "step": 4860 - }, - { - "epoch": 0.15584, - "grad_norm": 53.06976318359375, - "learning_rate": 1.5584000000000002e-05, - "loss": 1.9073, - "step": 4870 - }, - { - "epoch": 0.15616, - "grad_norm": 50.86164855957031, - "learning_rate": 1.5616e-05, - "loss": 1.8745, - "step": 4880 - }, - { - "epoch": 0.15648, - "grad_norm": 49.303367614746094, - "learning_rate": 1.5648e-05, - "loss": 1.8539, - "step": 4890 - }, - { - "epoch": 0.1568, - "grad_norm": 52.93231201171875, - "learning_rate": 1.5680000000000002e-05, - "loss": 1.868, - "step": 4900 - }, - { - "epoch": 0.15712, - "grad_norm": 51.90977478027344, - "learning_rate": 1.5712e-05, - "loss": 1.8691, - "step": 4910 - }, - { - "epoch": 0.15744, - "grad_norm": 52.30866622924805, - "learning_rate": 1.5744e-05, - "loss": 1.8875, - "step": 4920 - }, - { - "epoch": 0.15776, - "grad_norm": 52.9408073425293, - "learning_rate": 1.5776e-05, - "loss": 1.8858, - "step": 4930 - }, - { - "epoch": 0.15808, - "grad_norm": 54.67021560668945, - "learning_rate": 1.5808000000000002e-05, - "loss": 1.8601, - "step": 4940 - }, - { - "epoch": 0.1584, - "grad_norm": 51.81747055053711, - "learning_rate": 1.584e-05, - "loss": 1.8401, - "step": 4950 - }, - { - "epoch": 0.15872, - "grad_norm": 49.072303771972656, - "learning_rate": 1.5872e-05, - "loss": 1.8616, - "step": 4960 - }, - { - "epoch": 0.15904, - "grad_norm": 51.137847900390625, - "learning_rate": 1.5904000000000002e-05, - "loss": 1.858, - "step": 4970 - }, - { - "epoch": 0.15936, - "grad_norm": 52.146732330322266, - "learning_rate": 1.5936e-05, - "loss": 1.8621, - "step": 4980 - }, - { - "epoch": 0.15968, - "grad_norm": 52.12568664550781, - "learning_rate": 1.5968e-05, - "loss": 1.8653, - "step": 4990 - }, - { - "epoch": 0.16, - "grad_norm": 50.79985809326172, - "learning_rate": 1.6000000000000003e-05, - "loss": 1.8586, - "step": 5000 - }, - { - "epoch": 0.16032, - "grad_norm": 51.26686477661133, - "learning_rate": 1.6032e-05, - "loss": 1.8785, - "step": 5010 - }, - { - "epoch": 0.16064, - "grad_norm": 52.268218994140625, - "learning_rate": 1.6064e-05, - "loss": 1.8588, - "step": 5020 - }, - { - "epoch": 0.16096, - "grad_norm": 52.18198776245117, - "learning_rate": 1.6096e-05, - "loss": 1.854, - "step": 5030 - }, - { - "epoch": 0.16128, - "grad_norm": 51.68186569213867, - "learning_rate": 1.6128000000000002e-05, - "loss": 1.8735, - "step": 5040 - }, - { - "epoch": 0.1616, - "grad_norm": 50.604740142822266, - "learning_rate": 1.616e-05, - "loss": 1.8441, - "step": 5050 - }, - { - "epoch": 0.16192, - "grad_norm": 50.60602569580078, - "learning_rate": 1.6192e-05, - "loss": 1.8835, - "step": 5060 - }, - { - "epoch": 0.16224, - "grad_norm": 51.33891677856445, - "learning_rate": 1.6224000000000003e-05, - "loss": 1.8837, - "step": 5070 - }, - { - "epoch": 0.16256, - "grad_norm": 48.54633331298828, - "learning_rate": 1.6256e-05, - "loss": 1.8548, - "step": 5080 - }, - { - "epoch": 0.16288, - "grad_norm": 48.90038299560547, - "learning_rate": 1.6288e-05, - "loss": 1.8318, - "step": 5090 - }, - { - "epoch": 
0.1632, - "grad_norm": 50.23739242553711, - "learning_rate": 1.632e-05, - "loss": 1.8365, - "step": 5100 - }, - { - "epoch": 0.16352, - "grad_norm": 49.558319091796875, - "learning_rate": 1.6352000000000002e-05, - "loss": 1.8786, - "step": 5110 - }, - { - "epoch": 0.16384, - "grad_norm": 52.7110481262207, - "learning_rate": 1.6384e-05, - "loss": 1.8586, - "step": 5120 - }, - { - "epoch": 0.16416, - "grad_norm": 50.92003631591797, - "learning_rate": 1.6416e-05, - "loss": 1.8429, - "step": 5130 - }, - { - "epoch": 0.16448, - "grad_norm": 50.95296096801758, - "learning_rate": 1.6448000000000002e-05, - "loss": 1.8639, - "step": 5140 - }, - { - "epoch": 0.1648, - "grad_norm": 52.3206901550293, - "learning_rate": 1.648e-05, - "loss": 1.8414, - "step": 5150 - }, - { - "epoch": 0.16512, - "grad_norm": 51.82664489746094, - "learning_rate": 1.6512e-05, - "loss": 1.8899, - "step": 5160 - }, - { - "epoch": 0.16544, - "grad_norm": 51.14692687988281, - "learning_rate": 1.6544000000000003e-05, - "loss": 1.8878, - "step": 5170 - }, - { - "epoch": 0.16576, - "grad_norm": 49.87936782836914, - "learning_rate": 1.6576000000000002e-05, - "loss": 1.863, - "step": 5180 - }, - { - "epoch": 0.16608, - "grad_norm": 53.458229064941406, - "learning_rate": 1.6608e-05, - "loss": 1.876, - "step": 5190 - }, - { - "epoch": 0.1664, - "grad_norm": 49.172882080078125, - "learning_rate": 1.664e-05, - "loss": 1.8608, - "step": 5200 - }, - { - "epoch": 0.16672, - "grad_norm": 52.48230743408203, - "learning_rate": 1.6672000000000002e-05, - "loss": 1.8329, - "step": 5210 - }, - { - "epoch": 0.16704, - "grad_norm": 50.219295501708984, - "learning_rate": 1.6704e-05, - "loss": 1.8544, - "step": 5220 - }, - { - "epoch": 0.16736, - "grad_norm": 52.36051559448242, - "learning_rate": 1.6736e-05, - "loss": 1.8895, - "step": 5230 - }, - { - "epoch": 0.16768, - "grad_norm": 50.70857620239258, - "learning_rate": 1.6768000000000003e-05, - "loss": 1.8861, - "step": 5240 - }, - { - "epoch": 0.168, - "grad_norm": 53.20645523071289, - "learning_rate": 1.6800000000000002e-05, - "loss": 1.8769, - "step": 5250 - }, - { - "epoch": 0.16832, - "grad_norm": 50.887107849121094, - "learning_rate": 1.6832e-05, - "loss": 1.8503, - "step": 5260 - }, - { - "epoch": 0.16864, - "grad_norm": 52.35952377319336, - "learning_rate": 1.6864e-05, - "loss": 1.8513, - "step": 5270 - }, - { - "epoch": 0.16896, - "grad_norm": 50.2011833190918, - "learning_rate": 1.6896000000000002e-05, - "loss": 1.8662, - "step": 5280 - }, - { - "epoch": 0.16928, - "grad_norm": 49.79100799560547, - "learning_rate": 1.6928e-05, - "loss": 1.8815, - "step": 5290 - }, - { - "epoch": 0.1696, - "grad_norm": 52.19636154174805, - "learning_rate": 1.696e-05, - "loss": 1.8728, - "step": 5300 - }, - { - "epoch": 0.16992, - "grad_norm": 51.62786865234375, - "learning_rate": 1.6992000000000003e-05, - "loss": 1.8465, - "step": 5310 - }, - { - "epoch": 0.17024, - "grad_norm": 51.08530044555664, - "learning_rate": 1.7024e-05, - "loss": 1.8378, - "step": 5320 - }, - { - "epoch": 0.17056, - "grad_norm": 52.85392379760742, - "learning_rate": 1.7056e-05, - "loss": 1.8521, - "step": 5330 - }, - { - "epoch": 0.17088, - "grad_norm": 53.09284973144531, - "learning_rate": 1.7088000000000003e-05, - "loss": 1.8643, - "step": 5340 - }, - { - "epoch": 0.1712, - "grad_norm": 50.43644332885742, - "learning_rate": 1.7120000000000002e-05, - "loss": 1.8455, - "step": 5350 - }, - { - "epoch": 0.17152, - "grad_norm": 48.954288482666016, - "learning_rate": 1.7152e-05, - "loss": 1.8727, - "step": 5360 - }, - { - "epoch": 
0.17184, - "grad_norm": 49.91568374633789, - "learning_rate": 1.7184e-05, - "loss": 1.8408, - "step": 5370 - }, - { - "epoch": 0.17216, - "grad_norm": 52.08123016357422, - "learning_rate": 1.7216000000000003e-05, - "loss": 1.863, - "step": 5380 - }, - { - "epoch": 0.17248, - "grad_norm": 50.53387451171875, - "learning_rate": 1.7248e-05, - "loss": 1.8408, - "step": 5390 - }, - { - "epoch": 0.1728, - "grad_norm": 48.23560333251953, - "learning_rate": 1.728e-05, - "loss": 1.8469, - "step": 5400 - }, - { - "epoch": 0.17312, - "grad_norm": 50.42172622680664, - "learning_rate": 1.7312000000000003e-05, - "loss": 1.8425, - "step": 5410 - }, - { - "epoch": 0.17344, - "grad_norm": 54.85725784301758, - "learning_rate": 1.7344000000000002e-05, - "loss": 1.832, - "step": 5420 - }, - { - "epoch": 0.17376, - "grad_norm": 50.49094772338867, - "learning_rate": 1.7376e-05, - "loss": 1.8302, - "step": 5430 - }, - { - "epoch": 0.17408, - "grad_norm": 49.896392822265625, - "learning_rate": 1.7408e-05, - "loss": 1.8576, - "step": 5440 - }, - { - "epoch": 0.1744, - "grad_norm": 51.21379089355469, - "learning_rate": 1.7440000000000002e-05, - "loss": 1.8592, - "step": 5450 - }, - { - "epoch": 0.17472, - "grad_norm": 49.87135696411133, - "learning_rate": 1.7472e-05, - "loss": 1.8228, - "step": 5460 - }, - { - "epoch": 0.17504, - "grad_norm": 51.83329772949219, - "learning_rate": 1.7504e-05, - "loss": 1.8362, - "step": 5470 - }, - { - "epoch": 0.17536, - "grad_norm": 50.30447006225586, - "learning_rate": 1.7536000000000003e-05, - "loss": 1.8256, - "step": 5480 - }, - { - "epoch": 0.17568, - "grad_norm": 54.67784118652344, - "learning_rate": 1.7568000000000002e-05, - "loss": 1.8355, - "step": 5490 - }, - { - "epoch": 0.176, - "grad_norm": 50.64011764526367, - "learning_rate": 1.76e-05, - "loss": 1.8306, - "step": 5500 - }, - { - "epoch": 0.17632, - "grad_norm": 50.306182861328125, - "learning_rate": 1.7632000000000003e-05, - "loss": 1.8325, - "step": 5510 - }, - { - "epoch": 0.17664, - "grad_norm": 51.00660705566406, - "learning_rate": 1.7664000000000002e-05, - "loss": 1.8385, - "step": 5520 - }, - { - "epoch": 0.17696, - "grad_norm": 50.2363395690918, - "learning_rate": 1.7696e-05, - "loss": 1.8445, - "step": 5530 - }, - { - "epoch": 0.17728, - "grad_norm": 49.31149673461914, - "learning_rate": 1.7728e-05, - "loss": 1.8328, - "step": 5540 - }, - { - "epoch": 0.1776, - "grad_norm": 50.22332000732422, - "learning_rate": 1.7760000000000003e-05, - "loss": 1.8329, - "step": 5550 - }, - { - "epoch": 0.17792, - "grad_norm": 51.147701263427734, - "learning_rate": 1.7792000000000002e-05, - "loss": 1.8246, - "step": 5560 - }, - { - "epoch": 0.17824, - "grad_norm": 49.7264404296875, - "learning_rate": 1.7824e-05, - "loss": 1.8628, - "step": 5570 - }, - { - "epoch": 0.17856, - "grad_norm": 51.533050537109375, - "learning_rate": 1.7856000000000003e-05, - "loss": 1.8456, - "step": 5580 - }, - { - "epoch": 0.17888, - "grad_norm": 50.259490966796875, - "learning_rate": 1.7888000000000002e-05, - "loss": 1.8387, - "step": 5590 - }, - { - "epoch": 0.1792, - "grad_norm": 49.96138000488281, - "learning_rate": 1.792e-05, - "loss": 1.9014, - "step": 5600 - }, - { - "epoch": 0.17952, - "grad_norm": 50.213592529296875, - "learning_rate": 1.7952e-05, - "loss": 1.8341, - "step": 5610 - }, - { - "epoch": 0.17984, - "grad_norm": 50.193511962890625, - "learning_rate": 1.7984000000000003e-05, - "loss": 1.8239, - "step": 5620 - }, - { - "epoch": 0.18016, - "grad_norm": 52.21843338012695, - "learning_rate": 1.8016e-05, - "loss": 1.8507, - "step": 
5630 - }, - { - "epoch": 0.18048, - "grad_norm": 51.92977523803711, - "learning_rate": 1.8048e-05, - "loss": 1.8356, - "step": 5640 - }, - { - "epoch": 0.1808, - "grad_norm": 52.1923713684082, - "learning_rate": 1.8080000000000003e-05, - "loss": 1.8525, - "step": 5650 - }, - { - "epoch": 0.18112, - "grad_norm": 49.51137161254883, - "learning_rate": 1.8112000000000002e-05, - "loss": 1.8448, - "step": 5660 - }, - { - "epoch": 0.18144, - "grad_norm": 50.198326110839844, - "learning_rate": 1.8144e-05, - "loss": 1.8553, - "step": 5670 - }, - { - "epoch": 0.18176, - "grad_norm": 52.188941955566406, - "learning_rate": 1.8176000000000004e-05, - "loss": 1.8113, - "step": 5680 - }, - { - "epoch": 0.18208, - "grad_norm": 50.63345718383789, - "learning_rate": 1.8208000000000003e-05, - "loss": 1.8791, - "step": 5690 - }, - { - "epoch": 0.1824, - "grad_norm": 50.17583465576172, - "learning_rate": 1.824e-05, - "loss": 1.8235, - "step": 5700 - }, - { - "epoch": 0.18272, - "grad_norm": 51.316585540771484, - "learning_rate": 1.8272e-05, - "loss": 1.8119, - "step": 5710 - }, - { - "epoch": 0.18304, - "grad_norm": 51.15176010131836, - "learning_rate": 1.8304000000000003e-05, - "loss": 1.8495, - "step": 5720 - }, - { - "epoch": 0.18336, - "grad_norm": 48.695106506347656, - "learning_rate": 1.8336000000000002e-05, - "loss": 1.8428, - "step": 5730 - }, - { - "epoch": 0.18368, - "grad_norm": 51.550941467285156, - "learning_rate": 1.8368e-05, - "loss": 1.8434, - "step": 5740 - }, - { - "epoch": 0.184, - "grad_norm": 51.58259582519531, - "learning_rate": 1.8400000000000003e-05, - "loss": 1.8219, - "step": 5750 - }, - { - "epoch": 0.18432, - "grad_norm": 51.678165435791016, - "learning_rate": 1.8432000000000002e-05, - "loss": 1.8378, - "step": 5760 - }, - { - "epoch": 0.18464, - "grad_norm": 51.844886779785156, - "learning_rate": 1.8464e-05, - "loss": 1.8615, - "step": 5770 - }, - { - "epoch": 0.18496, - "grad_norm": 51.19352340698242, - "learning_rate": 1.8496e-05, - "loss": 1.8592, - "step": 5780 - }, - { - "epoch": 0.18528, - "grad_norm": 50.802066802978516, - "learning_rate": 1.8528000000000003e-05, - "loss": 1.8411, - "step": 5790 - }, - { - "epoch": 0.1856, - "grad_norm": 49.88358688354492, - "learning_rate": 1.8560000000000002e-05, - "loss": 1.8476, - "step": 5800 - }, - { - "epoch": 0.18592, - "grad_norm": 50.59362030029297, - "learning_rate": 1.8592e-05, - "loss": 1.8264, - "step": 5810 - }, - { - "epoch": 0.18624, - "grad_norm": 51.28804016113281, - "learning_rate": 1.8624000000000003e-05, - "loss": 1.8364, - "step": 5820 - }, - { - "epoch": 0.18656, - "grad_norm": 50.7144660949707, - "learning_rate": 1.8656000000000002e-05, - "loss": 1.8469, - "step": 5830 - }, - { - "epoch": 0.18688, - "grad_norm": 51.3600959777832, - "learning_rate": 1.8688e-05, - "loss": 1.8634, - "step": 5840 - }, - { - "epoch": 0.1872, - "grad_norm": 49.02365493774414, - "learning_rate": 1.8720000000000004e-05, - "loss": 1.8499, - "step": 5850 - }, - { - "epoch": 0.18752, - "grad_norm": 51.212425231933594, - "learning_rate": 1.8752000000000003e-05, - "loss": 1.8267, - "step": 5860 - }, - { - "epoch": 0.18784, - "grad_norm": 50.525779724121094, - "learning_rate": 1.8784000000000002e-05, - "loss": 1.8386, - "step": 5870 - }, - { - "epoch": 0.18816, - "grad_norm": 51.337127685546875, - "learning_rate": 1.8816e-05, - "loss": 1.8312, - "step": 5880 - }, - { - "epoch": 0.18848, - "grad_norm": 50.37173080444336, - "learning_rate": 1.8848000000000003e-05, - "loss": 1.8558, - "step": 5890 - }, - { - "epoch": 0.1888, - "grad_norm": 
50.84103012084961, - "learning_rate": 1.8880000000000002e-05, - "loss": 1.8531, - "step": 5900 - }, - { - "epoch": 0.18912, - "grad_norm": 54.04008865356445, - "learning_rate": 1.8912e-05, - "loss": 1.8471, - "step": 5910 - }, - { - "epoch": 0.18944, - "grad_norm": 51.854923248291016, - "learning_rate": 1.8944000000000004e-05, - "loss": 1.827, - "step": 5920 - }, - { - "epoch": 0.18976, - "grad_norm": 51.3617057800293, - "learning_rate": 1.8976000000000003e-05, - "loss": 1.8358, - "step": 5930 - }, - { - "epoch": 0.19008, - "grad_norm": 49.379112243652344, - "learning_rate": 1.9008e-05, - "loss": 1.8451, - "step": 5940 - }, - { - "epoch": 0.1904, - "grad_norm": 51.578125, - "learning_rate": 1.904e-05, - "loss": 1.8311, - "step": 5950 - }, - { - "epoch": 0.19072, - "grad_norm": 49.91752624511719, - "learning_rate": 1.9072000000000003e-05, - "loss": 1.8535, - "step": 5960 - }, - { - "epoch": 0.19104, - "grad_norm": 48.54228210449219, - "learning_rate": 1.9104000000000002e-05, - "loss": 1.8538, - "step": 5970 - }, - { - "epoch": 0.19136, - "grad_norm": 50.564903259277344, - "learning_rate": 1.9136e-05, - "loss": 1.8037, - "step": 5980 - }, - { - "epoch": 0.19168, - "grad_norm": 51.50353240966797, - "learning_rate": 1.9168000000000004e-05, - "loss": 1.8539, - "step": 5990 - }, - { - "epoch": 0.192, - "grad_norm": 47.665870666503906, - "learning_rate": 1.9200000000000003e-05, - "loss": 1.8266, - "step": 6000 - }, - { - "epoch": 0.19232, - "grad_norm": 51.77862548828125, - "learning_rate": 1.9232e-05, - "loss": 1.824, - "step": 6010 - }, - { - "epoch": 0.19264, - "grad_norm": 48.47139358520508, - "learning_rate": 1.9264e-05, - "loss": 1.8496, - "step": 6020 - }, - { - "epoch": 0.19296, - "grad_norm": 50.459163665771484, - "learning_rate": 1.9296000000000003e-05, - "loss": 1.86, - "step": 6030 - }, - { - "epoch": 0.19328, - "grad_norm": 48.988704681396484, - "learning_rate": 1.9328000000000002e-05, - "loss": 1.8439, - "step": 6040 - }, - { - "epoch": 0.1936, - "grad_norm": 50.94102478027344, - "learning_rate": 1.936e-05, - "loss": 1.8203, - "step": 6050 - }, - { - "epoch": 0.19392, - "grad_norm": 49.791561126708984, - "learning_rate": 1.9392000000000003e-05, - "loss": 1.8114, - "step": 6060 - }, - { - "epoch": 0.19424, - "grad_norm": 52.81388473510742, - "learning_rate": 1.9424e-05, - "loss": 1.8268, - "step": 6070 - }, - { - "epoch": 0.19456, - "grad_norm": 50.54151153564453, - "learning_rate": 1.9456e-05, - "loss": 1.8154, - "step": 6080 - }, - { - "epoch": 0.19488, - "grad_norm": 50.21413040161133, - "learning_rate": 1.9488000000000004e-05, - "loss": 1.8101, - "step": 6090 - }, - { - "epoch": 0.1952, - "grad_norm": 49.325809478759766, - "learning_rate": 1.9520000000000003e-05, - "loss": 1.8272, - "step": 6100 - }, - { - "epoch": 0.19552, - "grad_norm": 52.05915069580078, - "learning_rate": 1.9552000000000002e-05, - "loss": 1.8456, - "step": 6110 - }, - { - "epoch": 0.19584, - "grad_norm": 49.70790481567383, - "learning_rate": 1.9584e-05, - "loss": 1.8197, - "step": 6120 - }, - { - "epoch": 0.19616, - "grad_norm": 48.50461959838867, - "learning_rate": 1.9616000000000003e-05, - "loss": 1.8252, - "step": 6130 - }, - { - "epoch": 0.19648, - "grad_norm": 50.48190689086914, - "learning_rate": 1.9648000000000002e-05, - "loss": 1.8315, - "step": 6140 - }, - { - "epoch": 0.1968, - "grad_norm": 48.6129264831543, - "learning_rate": 1.968e-05, - "loss": 1.837, - "step": 6150 - }, - { - "epoch": 0.19712, - "grad_norm": 50.71052169799805, - "learning_rate": 1.9712000000000004e-05, - "loss": 1.8467, - 
"step": 6160 - }, - { - "epoch": 0.19744, - "grad_norm": 46.237510681152344, - "learning_rate": 1.9744e-05, - "loss": 1.7948, - "step": 6170 - }, - { - "epoch": 0.19776, - "grad_norm": 49.22959518432617, - "learning_rate": 1.9776000000000002e-05, - "loss": 1.8117, - "step": 6180 - }, - { - "epoch": 0.19808, - "grad_norm": 49.88234329223633, - "learning_rate": 1.9808e-05, - "loss": 1.8186, - "step": 6190 - }, - { - "epoch": 0.1984, - "grad_norm": 50.502845764160156, - "learning_rate": 1.9840000000000003e-05, - "loss": 1.8444, - "step": 6200 - }, - { - "epoch": 0.19872, - "grad_norm": 49.359500885009766, - "learning_rate": 1.9872000000000002e-05, - "loss": 1.8415, - "step": 6210 - }, - { - "epoch": 0.19904, - "grad_norm": 48.598777770996094, - "learning_rate": 1.9904e-05, - "loss": 1.8026, - "step": 6220 - }, - { - "epoch": 0.19936, - "grad_norm": 50.7224006652832, - "learning_rate": 1.9936000000000004e-05, - "loss": 1.8158, - "step": 6230 - }, - { - "epoch": 0.19968, - "grad_norm": 49.399681091308594, - "learning_rate": 1.9968e-05, - "loss": 1.848, - "step": 6240 - }, - { - "epoch": 0.2, - "grad_norm": 48.65672302246094, - "learning_rate": 2e-05, - "loss": 1.8308, - "step": 6250 - }, - { - "epoch": 0.20032, - "grad_norm": 51.61461639404297, - "learning_rate": 1.9996444444444446e-05, - "loss": 1.8579, - "step": 6260 - }, - { - "epoch": 0.20064, - "grad_norm": 49.613704681396484, - "learning_rate": 1.999288888888889e-05, - "loss": 1.8326, - "step": 6270 - }, - { - "epoch": 0.20096, - "grad_norm": 50.30833435058594, - "learning_rate": 1.9989333333333335e-05, - "loss": 1.82, - "step": 6280 - }, - { - "epoch": 0.20128, - "grad_norm": 50.83747482299805, - "learning_rate": 1.998577777777778e-05, - "loss": 1.8319, - "step": 6290 - }, - { - "epoch": 0.2016, - "grad_norm": 49.3170051574707, - "learning_rate": 1.9982222222222224e-05, - "loss": 1.8233, - "step": 6300 - }, - { - "epoch": 0.20192, - "grad_norm": 49.773380279541016, - "learning_rate": 1.997866666666667e-05, - "loss": 1.8443, - "step": 6310 - }, - { - "epoch": 0.20224, - "grad_norm": 49.04866027832031, - "learning_rate": 1.997511111111111e-05, - "loss": 1.8392, - "step": 6320 - }, - { - "epoch": 0.20256, - "grad_norm": 50.339622497558594, - "learning_rate": 1.9971555555555558e-05, - "loss": 1.8446, - "step": 6330 - }, - { - "epoch": 0.20288, - "grad_norm": 52.96565628051758, - "learning_rate": 1.9968e-05, - "loss": 1.883, - "step": 6340 - }, - { - "epoch": 0.2032, - "grad_norm": 48.281497955322266, - "learning_rate": 1.9964444444444447e-05, - "loss": 1.81, - "step": 6350 - }, - { - "epoch": 0.20352, - "grad_norm": 49.1937255859375, - "learning_rate": 1.996088888888889e-05, - "loss": 1.8321, - "step": 6360 - }, - { - "epoch": 0.20384, - "grad_norm": 51.74014663696289, - "learning_rate": 1.9957333333333336e-05, - "loss": 1.7919, - "step": 6370 - }, - { - "epoch": 0.20416, - "grad_norm": 48.33229064941406, - "learning_rate": 1.9953777777777777e-05, - "loss": 1.8402, - "step": 6380 - }, - { - "epoch": 0.20448, - "grad_norm": 51.9826774597168, - "learning_rate": 1.9950222222222225e-05, - "loss": 1.8295, - "step": 6390 - }, - { - "epoch": 0.2048, - "grad_norm": 48.660789489746094, - "learning_rate": 1.9946666666666667e-05, - "loss": 1.8047, - "step": 6400 - }, - { - "epoch": 0.20512, - "grad_norm": 49.63129806518555, - "learning_rate": 1.9943111111111114e-05, - "loss": 1.8117, - "step": 6410 - }, - { - "epoch": 0.20544, - "grad_norm": 51.93993377685547, - "learning_rate": 1.9939555555555556e-05, - "loss": 1.8335, - "step": 6420 - }, - { - 
"epoch": 0.20576, - "grad_norm": 50.61872482299805, - "learning_rate": 1.9936000000000004e-05, - "loss": 1.8611, - "step": 6430 - }, - { - "epoch": 0.20608, - "grad_norm": 51.35618591308594, - "learning_rate": 1.9932444444444445e-05, - "loss": 1.8676, - "step": 6440 - }, - { - "epoch": 0.2064, - "grad_norm": 53.216251373291016, - "learning_rate": 1.992888888888889e-05, - "loss": 1.8581, - "step": 6450 - }, - { - "epoch": 0.20672, - "grad_norm": 51.05562973022461, - "learning_rate": 1.9925333333333334e-05, - "loss": 1.8037, - "step": 6460 - }, - { - "epoch": 0.20704, - "grad_norm": 48.82215118408203, - "learning_rate": 1.992177777777778e-05, - "loss": 1.8448, - "step": 6470 - }, - { - "epoch": 0.20736, - "grad_norm": 52.66543960571289, - "learning_rate": 1.9918222222222223e-05, - "loss": 1.8332, - "step": 6480 - }, - { - "epoch": 0.20768, - "grad_norm": 49.5877799987793, - "learning_rate": 1.9914666666666668e-05, - "loss": 1.8237, - "step": 6490 - }, - { - "epoch": 0.208, - "grad_norm": 47.73166275024414, - "learning_rate": 1.9911111111111112e-05, - "loss": 1.8148, - "step": 6500 - }, - { - "epoch": 0.20832, - "grad_norm": 47.3873405456543, - "learning_rate": 1.9907555555555557e-05, - "loss": 1.8299, - "step": 6510 - }, - { - "epoch": 0.20864, - "grad_norm": 49.92698669433594, - "learning_rate": 1.9904e-05, - "loss": 1.8304, - "step": 6520 - }, - { - "epoch": 0.20896, - "grad_norm": 48.46565628051758, - "learning_rate": 1.9900444444444446e-05, - "loss": 1.8439, - "step": 6530 - }, - { - "epoch": 0.20928, - "grad_norm": 50.39113998413086, - "learning_rate": 1.989688888888889e-05, - "loss": 1.8385, - "step": 6540 - }, - { - "epoch": 0.2096, - "grad_norm": 46.97870635986328, - "learning_rate": 1.9893333333333335e-05, - "loss": 1.8522, - "step": 6550 - }, - { - "epoch": 0.20992, - "grad_norm": 52.359954833984375, - "learning_rate": 1.988977777777778e-05, - "loss": 1.8461, - "step": 6560 - }, - { - "epoch": 0.21024, - "grad_norm": 50.89912796020508, - "learning_rate": 1.9886222222222224e-05, - "loss": 1.8255, - "step": 6570 - }, - { - "epoch": 0.21056, - "grad_norm": 49.93498992919922, - "learning_rate": 1.988266666666667e-05, - "loss": 1.807, - "step": 6580 - }, - { - "epoch": 0.21088, - "grad_norm": 51.63752746582031, - "learning_rate": 1.9879111111111113e-05, - "loss": 1.8633, - "step": 6590 - }, - { - "epoch": 0.2112, - "grad_norm": 48.523582458496094, - "learning_rate": 1.9875555555555558e-05, - "loss": 1.8217, - "step": 6600 - }, - { - "epoch": 0.21152, - "grad_norm": 48.10843276977539, - "learning_rate": 1.9872000000000002e-05, - "loss": 1.8299, - "step": 6610 - }, - { - "epoch": 0.21184, - "grad_norm": 49.58185577392578, - "learning_rate": 1.9868444444444447e-05, - "loss": 1.8587, - "step": 6620 - }, - { - "epoch": 0.21216, - "grad_norm": 48.92919158935547, - "learning_rate": 1.986488888888889e-05, - "loss": 1.8477, - "step": 6630 - }, - { - "epoch": 0.21248, - "grad_norm": 51.368221282958984, - "learning_rate": 1.9861333333333336e-05, - "loss": 1.823, - "step": 6640 - }, - { - "epoch": 0.2128, - "grad_norm": 50.350975036621094, - "learning_rate": 1.985777777777778e-05, - "loss": 1.8338, - "step": 6650 - }, - { - "epoch": 0.21312, - "grad_norm": 48.929107666015625, - "learning_rate": 1.9854222222222225e-05, - "loss": 1.8161, - "step": 6660 - }, - { - "epoch": 0.21344, - "grad_norm": 49.51737594604492, - "learning_rate": 1.985066666666667e-05, - "loss": 1.8434, - "step": 6670 - }, - { - "epoch": 0.21376, - "grad_norm": 47.257171630859375, - "learning_rate": 1.9847111111111114e-05, - 
"loss": 1.8379, - "step": 6680 - }, - { - "epoch": 0.21408, - "grad_norm": 47.63740921020508, - "learning_rate": 1.984355555555556e-05, - "loss": 1.8138, - "step": 6690 - }, - { - "epoch": 0.2144, - "grad_norm": 48.09037780761719, - "learning_rate": 1.9840000000000003e-05, - "loss": 1.8487, - "step": 6700 - }, - { - "epoch": 0.21472, - "grad_norm": 52.047420501708984, - "learning_rate": 1.9836444444444448e-05, - "loss": 1.8312, - "step": 6710 - }, - { - "epoch": 0.21504, - "grad_norm": 50.519474029541016, - "learning_rate": 1.983288888888889e-05, - "loss": 1.8581, - "step": 6720 - }, - { - "epoch": 0.21536, - "grad_norm": 49.650238037109375, - "learning_rate": 1.9829333333333337e-05, - "loss": 1.8074, - "step": 6730 - }, - { - "epoch": 0.21568, - "grad_norm": 49.53740310668945, - "learning_rate": 1.9825777777777778e-05, - "loss": 1.8089, - "step": 6740 - }, - { - "epoch": 0.216, - "grad_norm": 50.6927604675293, - "learning_rate": 1.9822222222222226e-05, - "loss": 1.8014, - "step": 6750 - }, - { - "epoch": 0.21632, - "grad_norm": 54.616119384765625, - "learning_rate": 1.9818666666666667e-05, - "loss": 1.7996, - "step": 6760 - }, - { - "epoch": 0.21664, - "grad_norm": 47.97896957397461, - "learning_rate": 1.981511111111111e-05, - "loss": 1.8241, - "step": 6770 - }, - { - "epoch": 0.21696, - "grad_norm": 49.36483383178711, - "learning_rate": 1.9811555555555556e-05, - "loss": 1.7999, - "step": 6780 - }, - { - "epoch": 0.21728, - "grad_norm": 48.21449279785156, - "learning_rate": 1.9808e-05, - "loss": 1.8411, - "step": 6790 - }, - { - "epoch": 0.2176, - "grad_norm": 52.211509704589844, - "learning_rate": 1.9804444444444445e-05, - "loss": 1.8388, - "step": 6800 - }, - { - "epoch": 0.21792, - "grad_norm": 49.41231918334961, - "learning_rate": 1.980088888888889e-05, - "loss": 1.8171, - "step": 6810 - }, - { - "epoch": 0.21824, - "grad_norm": 50.267425537109375, - "learning_rate": 1.9797333333333334e-05, - "loss": 1.8346, - "step": 6820 - }, - { - "epoch": 0.21856, - "grad_norm": 47.68706512451172, - "learning_rate": 1.979377777777778e-05, - "loss": 1.8136, - "step": 6830 - }, - { - "epoch": 0.21888, - "grad_norm": 48.38620376586914, - "learning_rate": 1.9790222222222224e-05, - "loss": 1.8094, - "step": 6840 - }, - { - "epoch": 0.2192, - "grad_norm": 49.339664459228516, - "learning_rate": 1.9786666666666668e-05, - "loss": 1.8194, - "step": 6850 - }, - { - "epoch": 0.21952, - "grad_norm": 51.0460319519043, - "learning_rate": 1.9783111111111113e-05, - "loss": 1.7893, - "step": 6860 - }, - { - "epoch": 0.21984, - "grad_norm": 47.96543502807617, - "learning_rate": 1.9779555555555557e-05, - "loss": 1.7939, - "step": 6870 - }, - { - "epoch": 0.22016, - "grad_norm": 51.15391540527344, - "learning_rate": 1.9776000000000002e-05, - "loss": 1.8204, - "step": 6880 - }, - { - "epoch": 0.22048, - "grad_norm": 47.516658782958984, - "learning_rate": 1.9772444444444446e-05, - "loss": 1.8251, - "step": 6890 - }, - { - "epoch": 0.2208, - "grad_norm": 46.96601486206055, - "learning_rate": 1.976888888888889e-05, - "loss": 1.7885, - "step": 6900 - }, - { - "epoch": 0.22112, - "grad_norm": 50.02362060546875, - "learning_rate": 1.9765333333333335e-05, - "loss": 1.8253, - "step": 6910 - }, - { - "epoch": 0.22144, - "grad_norm": 49.389183044433594, - "learning_rate": 1.976177777777778e-05, - "loss": 1.808, - "step": 6920 - }, - { - "epoch": 0.22176, - "grad_norm": 50.543033599853516, - "learning_rate": 1.9758222222222225e-05, - "loss": 1.8429, - "step": 6930 - }, - { - "epoch": 0.22208, - "grad_norm": 50.78889465332031, - 
"learning_rate": 1.975466666666667e-05, - "loss": 1.8296, - "step": 6940 - }, - { - "epoch": 0.2224, - "grad_norm": 46.22223663330078, - "learning_rate": 1.9751111111111114e-05, - "loss": 1.8124, - "step": 6950 - }, - { - "epoch": 0.22272, - "grad_norm": 49.157684326171875, - "learning_rate": 1.9747555555555558e-05, - "loss": 1.8049, - "step": 6960 - }, - { - "epoch": 0.22304, - "grad_norm": 48.17796325683594, - "learning_rate": 1.9744e-05, - "loss": 1.7927, - "step": 6970 - }, - { - "epoch": 0.22336, - "grad_norm": 47.83882141113281, - "learning_rate": 1.9740444444444447e-05, - "loss": 1.7881, - "step": 6980 - }, - { - "epoch": 0.22368, - "grad_norm": 48.507755279541016, - "learning_rate": 1.973688888888889e-05, - "loss": 1.8145, - "step": 6990 - }, - { - "epoch": 0.224, - "grad_norm": 48.463340759277344, - "learning_rate": 1.9733333333333336e-05, - "loss": 1.8137, - "step": 7000 - }, - { - "epoch": 0.22432, - "grad_norm": 47.04343032836914, - "learning_rate": 1.9729777777777778e-05, - "loss": 1.8385, - "step": 7010 - }, - { - "epoch": 0.22464, - "grad_norm": 48.241920471191406, - "learning_rate": 1.9726222222222226e-05, - "loss": 1.7984, - "step": 7020 - }, - { - "epoch": 0.22496, - "grad_norm": 50.49192428588867, - "learning_rate": 1.9722666666666667e-05, - "loss": 1.8091, - "step": 7030 - }, - { - "epoch": 0.22528, - "grad_norm": 53.93669128417969, - "learning_rate": 1.9719111111111115e-05, - "loss": 1.8452, - "step": 7040 - }, - { - "epoch": 0.2256, - "grad_norm": 49.90575408935547, - "learning_rate": 1.9715555555555556e-05, - "loss": 1.8297, - "step": 7050 - }, - { - "epoch": 0.22592, - "grad_norm": 48.688812255859375, - "learning_rate": 1.9712000000000004e-05, - "loss": 1.8348, - "step": 7060 - }, - { - "epoch": 0.22624, - "grad_norm": 52.05872344970703, - "learning_rate": 1.9708444444444445e-05, - "loss": 1.8451, - "step": 7070 - }, - { - "epoch": 0.22656, - "grad_norm": 47.86528778076172, - "learning_rate": 1.9704888888888893e-05, - "loss": 1.8027, - "step": 7080 - }, - { - "epoch": 0.22688, - "grad_norm": 48.741554260253906, - "learning_rate": 1.9701333333333334e-05, - "loss": 1.8219, - "step": 7090 - }, - { - "epoch": 0.2272, - "grad_norm": 51.6088752746582, - "learning_rate": 1.969777777777778e-05, - "loss": 1.827, - "step": 7100 - }, - { - "epoch": 0.22752, - "grad_norm": 49.774803161621094, - "learning_rate": 1.9694222222222223e-05, - "loss": 1.8096, - "step": 7110 - }, - { - "epoch": 0.22784, - "grad_norm": 49.89011001586914, - "learning_rate": 1.9690666666666668e-05, - "loss": 1.8065, - "step": 7120 - }, - { - "epoch": 0.22816, - "grad_norm": 48.9777717590332, - "learning_rate": 1.9687111111111112e-05, - "loss": 1.8163, - "step": 7130 - }, - { - "epoch": 0.22848, - "grad_norm": 48.017555236816406, - "learning_rate": 1.9683555555555557e-05, - "loss": 1.8264, - "step": 7140 - }, - { - "epoch": 0.2288, - "grad_norm": 48.99272155761719, - "learning_rate": 1.968e-05, - "loss": 1.82, - "step": 7150 - }, - { - "epoch": 0.22912, - "grad_norm": 49.58944320678711, - "learning_rate": 1.9676444444444446e-05, - "loss": 1.8333, - "step": 7160 - }, - { - "epoch": 0.22944, - "grad_norm": 50.1278190612793, - "learning_rate": 1.967288888888889e-05, - "loss": 1.8064, - "step": 7170 - }, - { - "epoch": 0.22976, - "grad_norm": 51.582210540771484, - "learning_rate": 1.9669333333333335e-05, - "loss": 1.8158, - "step": 7180 - }, - { - "epoch": 0.23008, - "grad_norm": 49.89718246459961, - "learning_rate": 1.966577777777778e-05, - "loss": 1.8372, - "step": 7190 - }, - { - "epoch": 0.2304, - 
"grad_norm": 49.177001953125, - "learning_rate": 1.9662222222222224e-05, - "loss": 1.8397, - "step": 7200 - }, - { - "epoch": 0.23072, - "grad_norm": 47.591617584228516, - "learning_rate": 1.965866666666667e-05, - "loss": 1.799, - "step": 7210 - }, - { - "epoch": 0.23104, - "grad_norm": 49.274208068847656, - "learning_rate": 1.9655111111111113e-05, - "loss": 1.8342, - "step": 7220 - }, - { - "epoch": 0.23136, - "grad_norm": 47.56116485595703, - "learning_rate": 1.9651555555555558e-05, - "loss": 1.8098, - "step": 7230 - }, - { - "epoch": 0.23168, - "grad_norm": 46.61338424682617, - "learning_rate": 1.9648000000000002e-05, - "loss": 1.8044, - "step": 7240 - }, - { - "epoch": 0.232, - "grad_norm": 51.38720703125, - "learning_rate": 1.9644444444444447e-05, - "loss": 1.8376, - "step": 7250 - }, - { - "epoch": 0.23232, - "grad_norm": 48.6185417175293, - "learning_rate": 1.964088888888889e-05, - "loss": 1.8045, - "step": 7260 - }, - { - "epoch": 0.23264, - "grad_norm": 46.933162689208984, - "learning_rate": 1.9637333333333336e-05, - "loss": 1.8378, - "step": 7270 - }, - { - "epoch": 0.23296, - "grad_norm": 49.86311721801758, - "learning_rate": 1.963377777777778e-05, - "loss": 1.8234, - "step": 7280 - }, - { - "epoch": 0.23328, - "grad_norm": 50.35856246948242, - "learning_rate": 1.9630222222222225e-05, - "loss": 1.7944, - "step": 7290 - }, - { - "epoch": 0.2336, - "grad_norm": 47.689151763916016, - "learning_rate": 1.9626666666666666e-05, - "loss": 1.8469, - "step": 7300 - }, - { - "epoch": 0.23392, - "grad_norm": 46.43436813354492, - "learning_rate": 1.9623111111111114e-05, - "loss": 1.8313, - "step": 7310 - }, - { - "epoch": 0.23424, - "grad_norm": 49.50898361206055, - "learning_rate": 1.9619555555555555e-05, - "loss": 1.8028, - "step": 7320 - }, - { - "epoch": 0.23456, - "grad_norm": 52.28144454956055, - "learning_rate": 1.9616000000000003e-05, - "loss": 1.8171, - "step": 7330 - }, - { - "epoch": 0.23488, - "grad_norm": 49.656715393066406, - "learning_rate": 1.9612444444444444e-05, - "loss": 1.8042, - "step": 7340 - }, - { - "epoch": 0.2352, - "grad_norm": 49.44095993041992, - "learning_rate": 1.960888888888889e-05, - "loss": 1.814, - "step": 7350 - }, - { - "epoch": 0.23552, - "grad_norm": 48.879371643066406, - "learning_rate": 1.9605333333333334e-05, - "loss": 1.8068, - "step": 7360 - }, - { - "epoch": 0.23584, - "grad_norm": 50.464385986328125, - "learning_rate": 1.9601777777777778e-05, - "loss": 1.8314, - "step": 7370 - }, - { - "epoch": 0.23616, - "grad_norm": 46.931087493896484, - "learning_rate": 1.9598222222222223e-05, - "loss": 1.817, - "step": 7380 - }, - { - "epoch": 0.23648, - "grad_norm": 50.16783905029297, - "learning_rate": 1.9594666666666667e-05, - "loss": 1.8205, - "step": 7390 - }, - { - "epoch": 0.2368, - "grad_norm": 48.354862213134766, - "learning_rate": 1.9591111111111112e-05, - "loss": 1.7917, - "step": 7400 - }, - { - "epoch": 0.23712, - "grad_norm": 48.41815185546875, - "learning_rate": 1.9587555555555556e-05, - "loss": 1.7994, - "step": 7410 - }, - { - "epoch": 0.23744, - "grad_norm": 47.2100715637207, - "learning_rate": 1.9584e-05, - "loss": 1.8207, - "step": 7420 - }, - { - "epoch": 0.23776, - "grad_norm": 50.41408920288086, - "learning_rate": 1.9580444444444445e-05, - "loss": 1.8124, - "step": 7430 - }, - { - "epoch": 0.23808, - "grad_norm": 49.47113037109375, - "learning_rate": 1.957688888888889e-05, - "loss": 1.7621, - "step": 7440 - }, - { - "epoch": 0.2384, - "grad_norm": 49.08128356933594, - "learning_rate": 1.9573333333333335e-05, - "loss": 1.8061, - "step": 
7450 - }, - { - "epoch": 0.23872, - "grad_norm": 50.74384307861328, - "learning_rate": 1.956977777777778e-05, - "loss": 1.8424, - "step": 7460 - }, - { - "epoch": 0.23904, - "grad_norm": 52.481388092041016, - "learning_rate": 1.9566222222222224e-05, - "loss": 1.8255, - "step": 7470 - }, - { - "epoch": 0.23936, - "grad_norm": 49.44757080078125, - "learning_rate": 1.9562666666666668e-05, - "loss": 1.7856, - "step": 7480 - }, - { - "epoch": 0.23968, - "grad_norm": 48.70913314819336, - "learning_rate": 1.9559111111111113e-05, - "loss": 1.808, - "step": 7490 - }, - { - "epoch": 0.24, - "grad_norm": 50.43172836303711, - "learning_rate": 1.9555555555555557e-05, - "loss": 1.8109, - "step": 7500 - }, - { - "epoch": 0.24032, - "grad_norm": 50.79537582397461, - "learning_rate": 1.9552000000000002e-05, - "loss": 1.7816, - "step": 7510 - }, - { - "epoch": 0.24064, - "grad_norm": 47.54401397705078, - "learning_rate": 1.9548444444444446e-05, - "loss": 1.8125, - "step": 7520 - }, - { - "epoch": 0.24096, - "grad_norm": 48.10938262939453, - "learning_rate": 1.954488888888889e-05, - "loss": 1.779, - "step": 7530 - }, - { - "epoch": 0.24128, - "grad_norm": 49.07471466064453, - "learning_rate": 1.9541333333333336e-05, - "loss": 1.7964, - "step": 7540 - }, - { - "epoch": 0.2416, - "grad_norm": 49.26071548461914, - "learning_rate": 1.953777777777778e-05, - "loss": 1.8281, - "step": 7550 - }, - { - "epoch": 0.24192, - "grad_norm": 47.97166061401367, - "learning_rate": 1.9534222222222225e-05, - "loss": 1.8303, - "step": 7560 - }, - { - "epoch": 0.24224, - "grad_norm": 47.929691314697266, - "learning_rate": 1.953066666666667e-05, - "loss": 1.8316, - "step": 7570 - }, - { - "epoch": 0.24256, - "grad_norm": 50.565853118896484, - "learning_rate": 1.9527111111111114e-05, - "loss": 1.8414, - "step": 7580 - }, - { - "epoch": 0.24288, - "grad_norm": 48.56721878051758, - "learning_rate": 1.9523555555555558e-05, - "loss": 1.798, - "step": 7590 - }, - { - "epoch": 0.2432, - "grad_norm": 48.066680908203125, - "learning_rate": 1.9520000000000003e-05, - "loss": 1.8232, - "step": 7600 - }, - { - "epoch": 0.24352, - "grad_norm": 48.67830276489258, - "learning_rate": 1.9516444444444447e-05, - "loss": 1.8281, - "step": 7610 - }, - { - "epoch": 0.24384, - "grad_norm": 47.78409194946289, - "learning_rate": 1.951288888888889e-05, - "loss": 1.8156, - "step": 7620 - }, - { - "epoch": 0.24416, - "grad_norm": 46.79526138305664, - "learning_rate": 1.9509333333333337e-05, - "loss": 1.7844, - "step": 7630 - }, - { - "epoch": 0.24448, - "grad_norm": 47.15976333618164, - "learning_rate": 1.9505777777777778e-05, - "loss": 1.8172, - "step": 7640 - }, - { - "epoch": 0.2448, - "grad_norm": 50.70997619628906, - "learning_rate": 1.9502222222222226e-05, - "loss": 1.809, - "step": 7650 - }, - { - "epoch": 0.24512, - "grad_norm": 49.29851150512695, - "learning_rate": 1.9498666666666667e-05, - "loss": 1.7748, - "step": 7660 - }, - { - "epoch": 0.24544, - "grad_norm": 49.05281448364258, - "learning_rate": 1.9495111111111115e-05, - "loss": 1.8165, - "step": 7670 - }, - { - "epoch": 0.24576, - "grad_norm": 48.29036331176758, - "learning_rate": 1.9491555555555556e-05, - "loss": 1.7774, - "step": 7680 - }, - { - "epoch": 0.24608, - "grad_norm": 50.523536682128906, - "learning_rate": 1.9488000000000004e-05, - "loss": 1.793, - "step": 7690 - }, - { - "epoch": 0.2464, - "grad_norm": 48.234649658203125, - "learning_rate": 1.9484444444444445e-05, - "loss": 1.7864, - "step": 7700 - }, - { - "epoch": 0.24672, - "grad_norm": 49.41721725463867, - "learning_rate": 
1.9480888888888893e-05, - "loss": 1.8022, - "step": 7710 - }, - { - "epoch": 0.24704, - "grad_norm": 50.055355072021484, - "learning_rate": 1.9477333333333334e-05, - "loss": 1.792, - "step": 7720 - }, - { - "epoch": 0.24736, - "grad_norm": 50.13814926147461, - "learning_rate": 1.947377777777778e-05, - "loss": 1.7929, - "step": 7730 - }, - { - "epoch": 0.24768, - "grad_norm": 47.56300735473633, - "learning_rate": 1.9470222222222223e-05, - "loss": 1.7964, - "step": 7740 - }, - { - "epoch": 0.248, - "grad_norm": 51.54580307006836, - "learning_rate": 1.9466666666666668e-05, - "loss": 1.7946, - "step": 7750 - }, - { - "epoch": 0.24832, - "grad_norm": 47.7603759765625, - "learning_rate": 1.9463111111111112e-05, - "loss": 1.803, - "step": 7760 - }, - { - "epoch": 0.24864, - "grad_norm": 49.26568603515625, - "learning_rate": 1.9459555555555557e-05, - "loss": 1.8293, - "step": 7770 - }, - { - "epoch": 0.24896, - "grad_norm": 47.8105583190918, - "learning_rate": 1.9456e-05, - "loss": 1.7627, - "step": 7780 - }, - { - "epoch": 0.24928, - "grad_norm": 48.05279541015625, - "learning_rate": 1.9452444444444446e-05, - "loss": 1.7881, - "step": 7790 - }, - { - "epoch": 0.2496, - "grad_norm": 50.0302848815918, - "learning_rate": 1.944888888888889e-05, - "loss": 1.7889, - "step": 7800 - }, - { - "epoch": 0.24992, - "grad_norm": 49.33319854736328, - "learning_rate": 1.9445333333333335e-05, - "loss": 1.807, - "step": 7810 - }, - { - "epoch": 0.25024, - "grad_norm": 46.37745666503906, - "learning_rate": 1.944177777777778e-05, - "loss": 1.807, - "step": 7820 - }, - { - "epoch": 0.25056, - "grad_norm": 46.626041412353516, - "learning_rate": 1.9438222222222224e-05, - "loss": 1.8251, - "step": 7830 - }, - { - "epoch": 0.25088, - "grad_norm": 47.2230110168457, - "learning_rate": 1.943466666666667e-05, - "loss": 1.7891, - "step": 7840 - }, - { - "epoch": 0.2512, - "grad_norm": 46.498348236083984, - "learning_rate": 1.9431111111111113e-05, - "loss": 1.7927, - "step": 7850 - }, - { - "epoch": 0.25152, - "grad_norm": 47.701663970947266, - "learning_rate": 1.9427555555555558e-05, - "loss": 1.8128, - "step": 7860 - }, - { - "epoch": 0.25184, - "grad_norm": 46.88845443725586, - "learning_rate": 1.9424e-05, - "loss": 1.7962, - "step": 7870 - }, - { - "epoch": 0.25216, - "grad_norm": 50.29132080078125, - "learning_rate": 1.9420444444444447e-05, - "loss": 1.7985, - "step": 7880 - }, - { - "epoch": 0.25248, - "grad_norm": 48.16657257080078, - "learning_rate": 1.9416888888888888e-05, - "loss": 1.8203, - "step": 7890 - }, - { - "epoch": 0.2528, - "grad_norm": 50.36405944824219, - "learning_rate": 1.9413333333333336e-05, - "loss": 1.7756, - "step": 7900 - }, - { - "epoch": 0.25312, - "grad_norm": 47.91851043701172, - "learning_rate": 1.9409777777777777e-05, - "loss": 1.7991, - "step": 7910 - }, - { - "epoch": 0.25344, - "grad_norm": 49.88837814331055, - "learning_rate": 1.9406222222222225e-05, - "loss": 1.7981, - "step": 7920 - }, - { - "epoch": 0.25376, - "grad_norm": 49.87557601928711, - "learning_rate": 1.9402666666666666e-05, - "loss": 1.797, - "step": 7930 - }, - { - "epoch": 0.25408, - "grad_norm": 50.396419525146484, - "learning_rate": 1.9399111111111114e-05, - "loss": 1.8224, - "step": 7940 - }, - { - "epoch": 0.2544, - "grad_norm": 47.169822692871094, - "learning_rate": 1.9395555555555555e-05, - "loss": 1.7925, - "step": 7950 - }, - { - "epoch": 0.25472, - "grad_norm": 48.06186294555664, - "learning_rate": 1.9392000000000003e-05, - "loss": 1.7787, - "step": 7960 - }, - { - "epoch": 0.25504, - "grad_norm": 
46.269710540771484, - "learning_rate": 1.9388444444444445e-05, - "loss": 1.7816, - "step": 7970 - }, - { - "epoch": 0.25536, - "grad_norm": 45.91224670410156, - "learning_rate": 1.9384888888888892e-05, - "loss": 1.8049, - "step": 7980 - }, - { - "epoch": 0.25568, - "grad_norm": 47.012298583984375, - "learning_rate": 1.9381333333333334e-05, - "loss": 1.7766, - "step": 7990 - }, - { - "epoch": 0.256, - "grad_norm": 48.96611404418945, - "learning_rate": 1.9377777777777778e-05, - "loss": 1.7769, - "step": 8000 - }, - { - "epoch": 0.25632, - "grad_norm": 50.42731857299805, - "learning_rate": 1.9374222222222223e-05, - "loss": 1.8164, - "step": 8010 - }, - { - "epoch": 0.25664, - "grad_norm": 47.28165817260742, - "learning_rate": 1.9370666666666667e-05, - "loss": 1.7714, - "step": 8020 - }, - { - "epoch": 0.25696, - "grad_norm": 47.277191162109375, - "learning_rate": 1.9367111111111112e-05, - "loss": 1.8067, - "step": 8030 - }, - { - "epoch": 0.25728, - "grad_norm": 54.00135040283203, - "learning_rate": 1.9363555555555556e-05, - "loss": 1.8454, - "step": 8040 - }, - { - "epoch": 0.2576, - "grad_norm": 46.93778991699219, - "learning_rate": 1.936e-05, - "loss": 1.8049, - "step": 8050 - }, - { - "epoch": 0.25792, - "grad_norm": 49.47587203979492, - "learning_rate": 1.9356444444444446e-05, - "loss": 1.8151, - "step": 8060 - }, - { - "epoch": 0.25824, - "grad_norm": 47.49064636230469, - "learning_rate": 1.935288888888889e-05, - "loss": 1.8169, - "step": 8070 - }, - { - "epoch": 0.25856, - "grad_norm": 48.66109848022461, - "learning_rate": 1.9349333333333335e-05, - "loss": 1.7851, - "step": 8080 - }, - { - "epoch": 0.25888, - "grad_norm": 46.810081481933594, - "learning_rate": 1.934577777777778e-05, - "loss": 1.8211, - "step": 8090 - }, - { - "epoch": 0.2592, - "grad_norm": 46.94947814941406, - "learning_rate": 1.9342222222222224e-05, - "loss": 1.8169, - "step": 8100 - }, - { - "epoch": 0.25952, - "grad_norm": 48.868526458740234, - "learning_rate": 1.933866666666667e-05, - "loss": 1.8155, - "step": 8110 - }, - { - "epoch": 0.25984, - "grad_norm": 47.07829666137695, - "learning_rate": 1.9335111111111113e-05, - "loss": 1.7701, - "step": 8120 - }, - { - "epoch": 0.26016, - "grad_norm": 49.09501266479492, - "learning_rate": 1.9331555555555557e-05, - "loss": 1.7843, - "step": 8130 - }, - { - "epoch": 0.26048, - "grad_norm": 48.02748489379883, - "learning_rate": 1.9328000000000002e-05, - "loss": 1.7955, - "step": 8140 - }, - { - "epoch": 0.2608, - "grad_norm": 49.24200439453125, - "learning_rate": 1.9324444444444447e-05, - "loss": 1.788, - "step": 8150 - }, - { - "epoch": 0.26112, - "grad_norm": 47.42225646972656, - "learning_rate": 1.932088888888889e-05, - "loss": 1.7825, - "step": 8160 - }, - { - "epoch": 0.26144, - "grad_norm": 47.41923141479492, - "learning_rate": 1.9317333333333336e-05, - "loss": 1.7687, - "step": 8170 - }, - { - "epoch": 0.26176, - "grad_norm": 47.802574157714844, - "learning_rate": 1.931377777777778e-05, - "loss": 1.7701, - "step": 8180 - }, - { - "epoch": 0.26208, - "grad_norm": 48.67831802368164, - "learning_rate": 1.9310222222222225e-05, - "loss": 1.7282, - "step": 8190 - }, - { - "epoch": 0.2624, - "grad_norm": 47.63313293457031, - "learning_rate": 1.930666666666667e-05, - "loss": 1.7829, - "step": 8200 - }, - { - "epoch": 0.26272, - "grad_norm": 46.64168930053711, - "learning_rate": 1.9303111111111114e-05, - "loss": 1.7832, - "step": 8210 - }, - { - "epoch": 0.26304, - "grad_norm": 50.76215362548828, - "learning_rate": 1.929955555555556e-05, - "loss": 1.8191, - "step": 8220 - }, 
- { - "epoch": 0.26336, - "grad_norm": 47.865840911865234, - "learning_rate": 1.9296000000000003e-05, - "loss": 1.7964, - "step": 8230 - }, - { - "epoch": 0.26368, - "grad_norm": 48.87041473388672, - "learning_rate": 1.9292444444444448e-05, - "loss": 1.763, - "step": 8240 - }, - { - "epoch": 0.264, - "grad_norm": 49.547462463378906, - "learning_rate": 1.928888888888889e-05, - "loss": 1.8002, - "step": 8250 - }, - { - "epoch": 0.26432, - "grad_norm": 48.55093002319336, - "learning_rate": 1.9285333333333337e-05, - "loss": 1.7796, - "step": 8260 - }, - { - "epoch": 0.26464, - "grad_norm": 48.40106964111328, - "learning_rate": 1.9281777777777778e-05, - "loss": 1.8064, - "step": 8270 - }, - { - "epoch": 0.26496, - "grad_norm": 47.861427307128906, - "learning_rate": 1.9278222222222226e-05, - "loss": 1.7662, - "step": 8280 - }, - { - "epoch": 0.26528, - "grad_norm": 47.32961654663086, - "learning_rate": 1.9274666666666667e-05, - "loss": 1.7934, - "step": 8290 - }, - { - "epoch": 0.2656, - "grad_norm": 48.121803283691406, - "learning_rate": 1.9271111111111115e-05, - "loss": 1.7799, - "step": 8300 - }, - { - "epoch": 0.26592, - "grad_norm": 46.65339279174805, - "learning_rate": 1.9267555555555556e-05, - "loss": 1.75, - "step": 8310 - }, - { - "epoch": 0.26624, - "grad_norm": 47.114437103271484, - "learning_rate": 1.9264e-05, - "loss": 1.7605, - "step": 8320 - }, - { - "epoch": 0.26656, - "grad_norm": 49.223777770996094, - "learning_rate": 1.9260444444444445e-05, - "loss": 1.7603, - "step": 8330 - }, - { - "epoch": 0.26688, - "grad_norm": 47.0412483215332, - "learning_rate": 1.925688888888889e-05, - "loss": 1.7874, - "step": 8340 - }, - { - "epoch": 0.2672, - "grad_norm": 45.218345642089844, - "learning_rate": 1.9253333333333334e-05, - "loss": 1.7851, - "step": 8350 - }, - { - "epoch": 0.26752, - "grad_norm": 48.539791107177734, - "learning_rate": 1.924977777777778e-05, - "loss": 1.7917, - "step": 8360 - }, - { - "epoch": 0.26784, - "grad_norm": 48.42888641357422, - "learning_rate": 1.9246222222222223e-05, - "loss": 1.7962, - "step": 8370 - }, - { - "epoch": 0.26816, - "grad_norm": 48.56917953491211, - "learning_rate": 1.9242666666666668e-05, - "loss": 1.7911, - "step": 8380 - }, - { - "epoch": 0.26848, - "grad_norm": 47.5833625793457, - "learning_rate": 1.9239111111111112e-05, - "loss": 1.7804, - "step": 8390 - }, - { - "epoch": 0.2688, - "grad_norm": 48.058265686035156, - "learning_rate": 1.9235555555555557e-05, - "loss": 1.7782, - "step": 8400 - }, - { - "epoch": 0.26912, - "grad_norm": 48.20596694946289, - "learning_rate": 1.9232e-05, - "loss": 1.78, - "step": 8410 - }, - { - "epoch": 0.26944, - "grad_norm": 48.306278228759766, - "learning_rate": 1.9228444444444446e-05, - "loss": 1.8114, - "step": 8420 - }, - { - "epoch": 0.26976, - "grad_norm": 47.50426483154297, - "learning_rate": 1.922488888888889e-05, - "loss": 1.7793, - "step": 8430 - }, - { - "epoch": 0.27008, - "grad_norm": 46.97312545776367, - "learning_rate": 1.9221333333333335e-05, - "loss": 1.7618, - "step": 8440 - }, - { - "epoch": 0.2704, - "grad_norm": 46.43683624267578, - "learning_rate": 1.921777777777778e-05, - "loss": 1.7896, - "step": 8450 - }, - { - "epoch": 0.27072, - "grad_norm": 46.32859802246094, - "learning_rate": 1.9214222222222224e-05, - "loss": 1.8147, - "step": 8460 - }, - { - "epoch": 0.27104, - "grad_norm": 51.10436248779297, - "learning_rate": 1.921066666666667e-05, - "loss": 1.8001, - "step": 8470 - }, - { - "epoch": 0.27136, - "grad_norm": 47.70284652709961, - "learning_rate": 1.9207111111111113e-05, - "loss": 
1.7932, - "step": 8480 - }, - { - "epoch": 0.27168, - "grad_norm": 47.11650848388672, - "learning_rate": 1.9203555555555558e-05, - "loss": 1.8136, - "step": 8490 - }, - { - "epoch": 0.272, - "grad_norm": 47.973411560058594, - "learning_rate": 1.9200000000000003e-05, - "loss": 1.7823, - "step": 8500 - }, - { - "epoch": 0.27232, - "grad_norm": 48.01387023925781, - "learning_rate": 1.9196444444444447e-05, - "loss": 1.7939, - "step": 8510 - }, - { - "epoch": 0.27264, - "grad_norm": 49.093467712402344, - "learning_rate": 1.9192888888888888e-05, - "loss": 1.7999, - "step": 8520 - }, - { - "epoch": 0.27296, - "grad_norm": 48.86354446411133, - "learning_rate": 1.9189333333333336e-05, - "loss": 1.7981, - "step": 8530 - }, - { - "epoch": 0.27328, - "grad_norm": 47.82622146606445, - "learning_rate": 1.9185777777777777e-05, - "loss": 1.8161, - "step": 8540 - }, - { - "epoch": 0.2736, - "grad_norm": 47.5350456237793, - "learning_rate": 1.9182222222222225e-05, - "loss": 1.7884, - "step": 8550 - }, - { - "epoch": 0.27392, - "grad_norm": 46.69621276855469, - "learning_rate": 1.9178666666666666e-05, - "loss": 1.7977, - "step": 8560 - }, - { - "epoch": 0.27424, - "grad_norm": 47.91357421875, - "learning_rate": 1.9175111111111114e-05, - "loss": 1.7925, - "step": 8570 - }, - { - "epoch": 0.27456, - "grad_norm": 47.39646530151367, - "learning_rate": 1.9171555555555556e-05, - "loss": 1.8325, - "step": 8580 - }, - { - "epoch": 0.27488, - "grad_norm": 48.67210006713867, - "learning_rate": 1.9168000000000004e-05, - "loss": 1.7686, - "step": 8590 - }, - { - "epoch": 0.2752, - "grad_norm": 47.34749984741211, - "learning_rate": 1.9164444444444445e-05, - "loss": 1.7635, - "step": 8600 - }, - { - "epoch": 0.27552, - "grad_norm": 46.572601318359375, - "learning_rate": 1.9160888888888893e-05, - "loss": 1.8069, - "step": 8610 - }, - { - "epoch": 0.27584, - "grad_norm": 47.41379928588867, - "learning_rate": 1.9157333333333334e-05, - "loss": 1.8034, - "step": 8620 - }, - { - "epoch": 0.27616, - "grad_norm": 49.406028747558594, - "learning_rate": 1.915377777777778e-05, - "loss": 1.7773, - "step": 8630 - }, - { - "epoch": 0.27648, - "grad_norm": 46.770076751708984, - "learning_rate": 1.9150222222222223e-05, - "loss": 1.7657, - "step": 8640 - }, - { - "epoch": 0.2768, - "grad_norm": 46.42891311645508, - "learning_rate": 1.9146666666666667e-05, - "loss": 1.7853, - "step": 8650 - }, - { - "epoch": 0.27712, - "grad_norm": 48.82978057861328, - "learning_rate": 1.9143111111111112e-05, - "loss": 1.761, - "step": 8660 - }, - { - "epoch": 0.27744, - "grad_norm": 54.949058532714844, - "learning_rate": 1.9139555555555557e-05, - "loss": 1.8219, - "step": 8670 - }, - { - "epoch": 0.27776, - "grad_norm": 49.66971969604492, - "learning_rate": 1.9136e-05, - "loss": 1.7843, - "step": 8680 - }, - { - "epoch": 0.27808, - "grad_norm": 51.274681091308594, - "learning_rate": 1.9132444444444446e-05, - "loss": 1.7775, - "step": 8690 - }, - { - "epoch": 0.2784, - "grad_norm": 52.11601638793945, - "learning_rate": 1.912888888888889e-05, - "loss": 1.7627, - "step": 8700 - }, - { - "epoch": 0.27872, - "grad_norm": 46.86934280395508, - "learning_rate": 1.9125333333333335e-05, - "loss": 1.7629, - "step": 8710 - }, - { - "epoch": 0.27904, - "grad_norm": 48.512325286865234, - "learning_rate": 1.912177777777778e-05, - "loss": 1.7916, - "step": 8720 - }, - { - "epoch": 0.27936, - "grad_norm": 46.8431510925293, - "learning_rate": 1.9118222222222224e-05, - "loss": 1.8014, - "step": 8730 - }, - { - "epoch": 0.27968, - "grad_norm": 46.54253387451172, - 
"learning_rate": 1.911466666666667e-05, - "loss": 1.7705, - "step": 8740 - }, - { - "epoch": 0.28, - "grad_norm": 48.69770812988281, - "learning_rate": 1.9111111111111113e-05, - "loss": 1.7871, - "step": 8750 - }, - { - "epoch": 0.28032, - "grad_norm": 47.79988098144531, - "learning_rate": 1.9107555555555558e-05, - "loss": 1.7642, - "step": 8760 - }, - { - "epoch": 0.28064, - "grad_norm": 47.7818489074707, - "learning_rate": 1.9104000000000002e-05, - "loss": 1.8089, - "step": 8770 - }, - { - "epoch": 0.28096, - "grad_norm": 46.9085807800293, - "learning_rate": 1.9100444444444447e-05, - "loss": 1.8113, - "step": 8780 - }, - { - "epoch": 0.28128, - "grad_norm": 51.09097671508789, - "learning_rate": 1.909688888888889e-05, - "loss": 1.7796, - "step": 8790 - }, - { - "epoch": 0.2816, - "grad_norm": 45.09664535522461, - "learning_rate": 1.9093333333333336e-05, - "loss": 1.7894, - "step": 8800 - }, - { - "epoch": 0.28192, - "grad_norm": 46.47522735595703, - "learning_rate": 1.908977777777778e-05, - "loss": 1.7885, - "step": 8810 - }, - { - "epoch": 0.28224, - "grad_norm": 46.489280700683594, - "learning_rate": 1.9086222222222225e-05, - "loss": 1.7831, - "step": 8820 - }, - { - "epoch": 0.28256, - "grad_norm": 46.805702209472656, - "learning_rate": 1.908266666666667e-05, - "loss": 1.7966, - "step": 8830 - }, - { - "epoch": 0.28288, - "grad_norm": 48.01674270629883, - "learning_rate": 1.9079111111111114e-05, - "loss": 1.7632, - "step": 8840 - }, - { - "epoch": 0.2832, - "grad_norm": 48.38603973388672, - "learning_rate": 1.9075555555555555e-05, - "loss": 1.7791, - "step": 8850 - }, - { - "epoch": 0.28352, - "grad_norm": 48.47395706176758, - "learning_rate": 1.9072000000000003e-05, - "loss": 1.8077, - "step": 8860 - }, - { - "epoch": 0.28384, - "grad_norm": 48.59999465942383, - "learning_rate": 1.9068444444444444e-05, - "loss": 1.769, - "step": 8870 - }, - { - "epoch": 0.28416, - "grad_norm": 45.61606979370117, - "learning_rate": 1.9064888888888892e-05, - "loss": 1.7588, - "step": 8880 - }, - { - "epoch": 0.28448, - "grad_norm": 47.03929901123047, - "learning_rate": 1.9061333333333333e-05, - "loss": 1.7967, - "step": 8890 - }, - { - "epoch": 0.2848, - "grad_norm": 50.322166442871094, - "learning_rate": 1.9057777777777778e-05, - "loss": 1.7673, - "step": 8900 - }, - { - "epoch": 0.28512, - "grad_norm": 47.95815658569336, - "learning_rate": 1.9054222222222222e-05, - "loss": 1.8022, - "step": 8910 - }, - { - "epoch": 0.28544, - "grad_norm": 46.3283576965332, - "learning_rate": 1.9050666666666667e-05, - "loss": 1.7782, - "step": 8920 - }, - { - "epoch": 0.28576, - "grad_norm": 50.623783111572266, - "learning_rate": 1.904711111111111e-05, - "loss": 1.7955, - "step": 8930 - }, - { - "epoch": 0.28608, - "grad_norm": 47.69876480102539, - "learning_rate": 1.9043555555555556e-05, - "loss": 1.7749, - "step": 8940 - }, - { - "epoch": 0.2864, - "grad_norm": 46.81853103637695, - "learning_rate": 1.904e-05, - "loss": 1.8274, - "step": 8950 - }, - { - "epoch": 0.28672, - "grad_norm": 46.980613708496094, - "learning_rate": 1.9036444444444445e-05, - "loss": 1.7823, - "step": 8960 - }, - { - "epoch": 0.28704, - "grad_norm": 49.283103942871094, - "learning_rate": 1.903288888888889e-05, - "loss": 1.7414, - "step": 8970 - }, - { - "epoch": 0.28736, - "grad_norm": 48.2234001159668, - "learning_rate": 1.9029333333333334e-05, - "loss": 1.7708, - "step": 8980 - }, - { - "epoch": 0.28768, - "grad_norm": 47.294795989990234, - "learning_rate": 1.902577777777778e-05, - "loss": 1.8338, - "step": 8990 - }, - { - "epoch": 0.288, - 
"grad_norm": 48.731834411621094, - "learning_rate": 1.9022222222222223e-05, - "loss": 1.7925, - "step": 9000 - }, - { - "epoch": 0.28832, - "grad_norm": 49.90666198730469, - "learning_rate": 1.9018666666666668e-05, - "loss": 1.8027, - "step": 9010 - }, - { - "epoch": 0.28864, - "grad_norm": 48.464569091796875, - "learning_rate": 1.9015111111111113e-05, - "loss": 1.7825, - "step": 9020 - }, - { - "epoch": 0.28896, - "grad_norm": 47.314823150634766, - "learning_rate": 1.9011555555555557e-05, - "loss": 1.7472, - "step": 9030 - }, - { - "epoch": 0.28928, - "grad_norm": 44.8106689453125, - "learning_rate": 1.9008e-05, - "loss": 1.7687, - "step": 9040 - }, - { - "epoch": 0.2896, - "grad_norm": 47.74726867675781, - "learning_rate": 1.9004444444444446e-05, - "loss": 1.7818, - "step": 9050 - }, - { - "epoch": 0.28992, - "grad_norm": 45.84540557861328, - "learning_rate": 1.900088888888889e-05, - "loss": 1.7905, - "step": 9060 - }, - { - "epoch": 0.29024, - "grad_norm": 46.0303840637207, - "learning_rate": 1.8997333333333335e-05, - "loss": 1.7701, - "step": 9070 - }, - { - "epoch": 0.29056, - "grad_norm": 47.5675048828125, - "learning_rate": 1.899377777777778e-05, - "loss": 1.7884, - "step": 9080 - }, - { - "epoch": 0.29088, - "grad_norm": 49.11726379394531, - "learning_rate": 1.8990222222222224e-05, - "loss": 1.7676, - "step": 9090 - }, - { - "epoch": 0.2912, - "grad_norm": 45.33092498779297, - "learning_rate": 1.898666666666667e-05, - "loss": 1.7959, - "step": 9100 - }, - { - "epoch": 0.29152, - "grad_norm": 45.65995407104492, - "learning_rate": 1.8983111111111114e-05, - "loss": 1.7636, - "step": 9110 - }, - { - "epoch": 0.29184, - "grad_norm": 46.20924758911133, - "learning_rate": 1.8979555555555558e-05, - "loss": 1.7979, - "step": 9120 - }, - { - "epoch": 0.29216, - "grad_norm": 45.72431945800781, - "learning_rate": 1.8976000000000003e-05, - "loss": 1.7564, - "step": 9130 - }, - { - "epoch": 0.29248, - "grad_norm": 46.04757308959961, - "learning_rate": 1.8972444444444447e-05, - "loss": 1.7588, - "step": 9140 - }, - { - "epoch": 0.2928, - "grad_norm": 45.486202239990234, - "learning_rate": 1.896888888888889e-05, - "loss": 1.7994, - "step": 9150 - }, - { - "epoch": 0.29312, - "grad_norm": 48.11737060546875, - "learning_rate": 1.8965333333333336e-05, - "loss": 1.7839, - "step": 9160 - }, - { - "epoch": 0.29344, - "grad_norm": 48.25556564331055, - "learning_rate": 1.8961777777777777e-05, - "loss": 1.7964, - "step": 9170 - }, - { - "epoch": 0.29376, - "grad_norm": 47.92764663696289, - "learning_rate": 1.8958222222222225e-05, - "loss": 1.759, - "step": 9180 - }, - { - "epoch": 0.29408, - "grad_norm": 48.09939956665039, - "learning_rate": 1.8954666666666667e-05, - "loss": 1.8148, - "step": 9190 - }, - { - "epoch": 0.2944, - "grad_norm": 45.122501373291016, - "learning_rate": 1.8951111111111115e-05, - "loss": 1.7707, - "step": 9200 - }, - { - "epoch": 0.29472, - "grad_norm": 47.23085403442383, - "learning_rate": 1.8947555555555556e-05, - "loss": 1.7575, - "step": 9210 - }, - { - "epoch": 0.29504, - "grad_norm": 47.04438018798828, - "learning_rate": 1.8944000000000004e-05, - "loss": 1.7709, - "step": 9220 - }, - { - "epoch": 0.29536, - "grad_norm": 49.08626174926758, - "learning_rate": 1.8940444444444445e-05, - "loss": 1.7826, - "step": 9230 - }, - { - "epoch": 0.29568, - "grad_norm": 45.1053581237793, - "learning_rate": 1.8936888888888893e-05, - "loss": 1.7963, - "step": 9240 - }, - { - "epoch": 0.296, - "grad_norm": 48.38051986694336, - "learning_rate": 1.8933333333333334e-05, - "loss": 1.7631, - 
"step": 9250 - }, - { - "epoch": 0.29632, - "grad_norm": 47.439598083496094, - "learning_rate": 1.8929777777777782e-05, - "loss": 1.7905, - "step": 9260 - }, - { - "epoch": 0.29664, - "grad_norm": 47.75306701660156, - "learning_rate": 1.8926222222222223e-05, - "loss": 1.7846, - "step": 9270 - }, - { - "epoch": 0.29696, - "grad_norm": 47.29454803466797, - "learning_rate": 1.8922666666666668e-05, - "loss": 1.7869, - "step": 9280 - }, - { - "epoch": 0.29728, - "grad_norm": 50.435028076171875, - "learning_rate": 1.8919111111111112e-05, - "loss": 1.7748, - "step": 9290 - }, - { - "epoch": 0.2976, - "grad_norm": 47.95176696777344, - "learning_rate": 1.8915555555555557e-05, - "loss": 1.766, - "step": 9300 - }, - { - "epoch": 0.29792, - "grad_norm": 48.74436950683594, - "learning_rate": 1.8912e-05, - "loss": 1.7644, - "step": 9310 - }, - { - "epoch": 0.29824, - "grad_norm": 47.629600524902344, - "learning_rate": 1.8908444444444446e-05, - "loss": 1.7657, - "step": 9320 - }, - { - "epoch": 0.29856, - "grad_norm": 48.885963439941406, - "learning_rate": 1.890488888888889e-05, - "loss": 1.7768, - "step": 9330 - }, - { - "epoch": 0.29888, - "grad_norm": 47.101810455322266, - "learning_rate": 1.8901333333333335e-05, - "loss": 1.7704, - "step": 9340 - }, - { - "epoch": 0.2992, - "grad_norm": 45.48361587524414, - "learning_rate": 1.889777777777778e-05, - "loss": 1.7875, - "step": 9350 - }, - { - "epoch": 0.29952, - "grad_norm": 48.26191329956055, - "learning_rate": 1.8894222222222224e-05, - "loss": 1.7986, - "step": 9360 - }, - { - "epoch": 0.29984, - "grad_norm": 47.485687255859375, - "learning_rate": 1.889066666666667e-05, - "loss": 1.7677, - "step": 9370 - }, - { - "epoch": 0.30016, - "grad_norm": 46.89303970336914, - "learning_rate": 1.8887111111111113e-05, - "loss": 1.7426, - "step": 9380 - }, - { - "epoch": 0.30048, - "grad_norm": 46.88542938232422, - "learning_rate": 1.8883555555555558e-05, - "loss": 1.7756, - "step": 9390 - }, - { - "epoch": 0.3008, - "grad_norm": 48.30686950683594, - "learning_rate": 1.8880000000000002e-05, - "loss": 1.8225, - "step": 9400 - }, - { - "epoch": 0.30112, - "grad_norm": 47.19614028930664, - "learning_rate": 1.8876444444444447e-05, - "loss": 1.7551, - "step": 9410 - }, - { - "epoch": 0.30144, - "grad_norm": 47.09248352050781, - "learning_rate": 1.8872888888888888e-05, - "loss": 1.7781, - "step": 9420 - }, - { - "epoch": 0.30176, - "grad_norm": 47.05224609375, - "learning_rate": 1.8869333333333336e-05, - "loss": 1.7903, - "step": 9430 - }, - { - "epoch": 0.30208, - "grad_norm": 47.02943420410156, - "learning_rate": 1.8865777777777777e-05, - "loss": 1.7767, - "step": 9440 - }, - { - "epoch": 0.3024, - "grad_norm": 47.527587890625, - "learning_rate": 1.8862222222222225e-05, - "loss": 1.7778, - "step": 9450 - }, - { - "epoch": 0.30272, - "grad_norm": 46.53438186645508, - "learning_rate": 1.8858666666666666e-05, - "loss": 1.7415, - "step": 9460 - }, - { - "epoch": 0.30304, - "grad_norm": 46.07378387451172, - "learning_rate": 1.8855111111111114e-05, - "loss": 1.7837, - "step": 9470 - }, - { - "epoch": 0.30336, - "grad_norm": 47.58318328857422, - "learning_rate": 1.8851555555555555e-05, - "loss": 1.7784, - "step": 9480 - }, - { - "epoch": 0.30368, - "grad_norm": 47.040992736816406, - "learning_rate": 1.8848000000000003e-05, - "loss": 1.7898, - "step": 9490 - }, - { - "epoch": 0.304, - "grad_norm": 47.662567138671875, - "learning_rate": 1.8844444444444444e-05, - "loss": 1.7556, - "step": 9500 - }, - { - "epoch": 0.30432, - "grad_norm": 47.28549575805664, - "learning_rate": 
1.8840888888888892e-05, - "loss": 1.765, - "step": 9510 - }, - { - "epoch": 0.30464, - "grad_norm": 46.851097106933594, - "learning_rate": 1.8837333333333333e-05, - "loss": 1.7466, - "step": 9520 - }, - { - "epoch": 0.30496, - "grad_norm": 46.6473388671875, - "learning_rate": 1.883377777777778e-05, - "loss": 1.7977, - "step": 9530 - }, - { - "epoch": 0.30528, - "grad_norm": 48.394996643066406, - "learning_rate": 1.8830222222222223e-05, - "loss": 1.7194, - "step": 9540 - }, - { - "epoch": 0.3056, - "grad_norm": 45.25028610229492, - "learning_rate": 1.8826666666666667e-05, - "loss": 1.7798, - "step": 9550 - }, - { - "epoch": 0.30592, - "grad_norm": 47.76869583129883, - "learning_rate": 1.882311111111111e-05, - "loss": 1.7691, - "step": 9560 - }, - { - "epoch": 0.30624, - "grad_norm": 48.7626838684082, - "learning_rate": 1.8819555555555556e-05, - "loss": 1.771, - "step": 9570 - }, - { - "epoch": 0.30656, - "grad_norm": 46.44325256347656, - "learning_rate": 1.8816e-05, - "loss": 1.742, - "step": 9580 - }, - { - "epoch": 0.30688, - "grad_norm": 46.602508544921875, - "learning_rate": 1.8812444444444445e-05, - "loss": 1.7771, - "step": 9590 - }, - { - "epoch": 0.3072, - "grad_norm": 47.8487663269043, - "learning_rate": 1.880888888888889e-05, - "loss": 1.7564, - "step": 9600 - }, - { - "epoch": 0.30752, - "grad_norm": 47.99014663696289, - "learning_rate": 1.8805333333333334e-05, - "loss": 1.795, - "step": 9610 - }, - { - "epoch": 0.30784, - "grad_norm": 46.99567794799805, - "learning_rate": 1.880177777777778e-05, - "loss": 1.771, - "step": 9620 - }, - { - "epoch": 0.30816, - "grad_norm": 45.05099868774414, - "learning_rate": 1.8798222222222224e-05, - "loss": 1.7543, - "step": 9630 - }, - { - "epoch": 0.30848, - "grad_norm": 44.71341323852539, - "learning_rate": 1.8794666666666668e-05, - "loss": 1.7704, - "step": 9640 - }, - { - "epoch": 0.3088, - "grad_norm": 47.164066314697266, - "learning_rate": 1.8791111111111113e-05, - "loss": 1.7739, - "step": 9650 - }, - { - "epoch": 0.30912, - "grad_norm": 47.73215866088867, - "learning_rate": 1.8787555555555557e-05, - "loss": 1.7642, - "step": 9660 - }, - { - "epoch": 0.30944, - "grad_norm": 45.98970413208008, - "learning_rate": 1.8784000000000002e-05, - "loss": 1.7849, - "step": 9670 - }, - { - "epoch": 0.30976, - "grad_norm": 47.81766128540039, - "learning_rate": 1.8780444444444446e-05, - "loss": 1.7772, - "step": 9680 - }, - { - "epoch": 0.31008, - "grad_norm": 47.27102279663086, - "learning_rate": 1.877688888888889e-05, - "loss": 1.7741, - "step": 9690 - }, - { - "epoch": 0.3104, - "grad_norm": 45.84796142578125, - "learning_rate": 1.8773333333333335e-05, - "loss": 1.7645, - "step": 9700 - }, - { - "epoch": 0.31072, - "grad_norm": 49.074859619140625, - "learning_rate": 1.876977777777778e-05, - "loss": 1.777, - "step": 9710 - }, - { - "epoch": 0.31104, - "grad_norm": 46.83264923095703, - "learning_rate": 1.8766222222222225e-05, - "loss": 1.7312, - "step": 9720 - }, - { - "epoch": 0.31136, - "grad_norm": 48.32571792602539, - "learning_rate": 1.876266666666667e-05, - "loss": 1.7871, - "step": 9730 - }, - { - "epoch": 0.31168, - "grad_norm": 45.8250846862793, - "learning_rate": 1.8759111111111114e-05, - "loss": 1.7578, - "step": 9740 - }, - { - "epoch": 0.312, - "grad_norm": 45.85641860961914, - "learning_rate": 1.8755555555555558e-05, - "loss": 1.8071, - "step": 9750 - }, - { - "epoch": 0.31232, - "grad_norm": 48.53336715698242, - "learning_rate": 1.8752000000000003e-05, - "loss": 1.7663, - "step": 9760 - }, - { - "epoch": 0.31264, - "grad_norm": 
47.985836029052734, - "learning_rate": 1.8748444444444447e-05, - "loss": 1.7587, - "step": 9770 - }, - { - "epoch": 0.31296, - "grad_norm": 46.00651550292969, - "learning_rate": 1.8744888888888892e-05, - "loss": 1.7713, - "step": 9780 - }, - { - "epoch": 0.31328, - "grad_norm": 44.8219108581543, - "learning_rate": 1.8741333333333336e-05, - "loss": 1.8027, - "step": 9790 - }, - { - "epoch": 0.3136, - "grad_norm": 48.181697845458984, - "learning_rate": 1.8737777777777778e-05, - "loss": 1.7904, - "step": 9800 - }, - { - "epoch": 0.31392, - "grad_norm": 46.20647430419922, - "learning_rate": 1.8734222222222226e-05, - "loss": 1.7507, - "step": 9810 - }, - { - "epoch": 0.31424, - "grad_norm": 46.36750793457031, - "learning_rate": 1.8730666666666667e-05, - "loss": 1.7458, - "step": 9820 - }, - { - "epoch": 0.31456, - "grad_norm": 48.03936004638672, - "learning_rate": 1.8727111111111115e-05, - "loss": 1.7935, - "step": 9830 - }, - { - "epoch": 0.31488, - "grad_norm": 47.40336990356445, - "learning_rate": 1.8723555555555556e-05, - "loss": 1.7613, - "step": 9840 - }, - { - "epoch": 0.3152, - "grad_norm": 44.89904022216797, - "learning_rate": 1.8720000000000004e-05, - "loss": 1.7486, - "step": 9850 - }, - { - "epoch": 0.31552, - "grad_norm": 46.88365936279297, - "learning_rate": 1.8716444444444445e-05, - "loss": 1.7897, - "step": 9860 - }, - { - "epoch": 0.31584, - "grad_norm": 45.93436050415039, - "learning_rate": 1.871288888888889e-05, - "loss": 1.7787, - "step": 9870 - }, - { - "epoch": 0.31616, - "grad_norm": 45.79889678955078, - "learning_rate": 1.8709333333333334e-05, - "loss": 1.7857, - "step": 9880 - }, - { - "epoch": 0.31648, - "grad_norm": 48.29885482788086, - "learning_rate": 1.870577777777778e-05, - "loss": 1.782, - "step": 9890 - }, - { - "epoch": 0.3168, - "grad_norm": 46.49193572998047, - "learning_rate": 1.8702222222222223e-05, - "loss": 1.7554, - "step": 9900 - }, - { - "epoch": 0.31712, - "grad_norm": 44.27806091308594, - "learning_rate": 1.8698666666666668e-05, - "loss": 1.773, - "step": 9910 - }, - { - "epoch": 0.31744, - "grad_norm": 48.690162658691406, - "learning_rate": 1.8695111111111112e-05, - "loss": 1.7727, - "step": 9920 - }, - { - "epoch": 0.31776, - "grad_norm": 47.276405334472656, - "learning_rate": 1.8691555555555557e-05, - "loss": 1.7563, - "step": 9930 - }, - { - "epoch": 0.31808, - "grad_norm": 48.51708221435547, - "learning_rate": 1.8688e-05, - "loss": 1.7822, - "step": 9940 - }, - { - "epoch": 0.3184, - "grad_norm": 48.64371109008789, - "learning_rate": 1.8684444444444446e-05, - "loss": 1.7525, - "step": 9950 - }, - { - "epoch": 0.31872, - "grad_norm": 45.173187255859375, - "learning_rate": 1.868088888888889e-05, - "loss": 1.7504, - "step": 9960 - }, - { - "epoch": 0.31904, - "grad_norm": 47.1632194519043, - "learning_rate": 1.8677333333333335e-05, - "loss": 1.784, - "step": 9970 - }, - { - "epoch": 0.31936, - "grad_norm": 46.15009307861328, - "learning_rate": 1.867377777777778e-05, - "loss": 1.7506, - "step": 9980 - }, - { - "epoch": 0.31968, - "grad_norm": 46.347412109375, - "learning_rate": 1.8670222222222224e-05, - "loss": 1.7905, - "step": 9990 - }, - { - "epoch": 0.32, - "grad_norm": 49.578147888183594, - "learning_rate": 1.866666666666667e-05, - "loss": 1.7491, - "step": 10000 - }, - { - "epoch": 0.32032, - "grad_norm": 48.711246490478516, - "learning_rate": 1.8663111111111113e-05, - "loss": 1.7571, - "step": 10010 - }, - { - "epoch": 0.32064, - "grad_norm": 47.9279899597168, - "learning_rate": 1.8659555555555558e-05, - "loss": 1.764, - "step": 10020 - }, - 
{ - "epoch": 0.32096, - "grad_norm": 47.64333724975586, - "learning_rate": 1.8656000000000002e-05, - "loss": 1.7664, - "step": 10030 - }, - { - "epoch": 0.32128, - "grad_norm": 46.161312103271484, - "learning_rate": 1.8652444444444447e-05, - "loss": 1.762, - "step": 10040 - }, - { - "epoch": 0.3216, - "grad_norm": 46.62035369873047, - "learning_rate": 1.8648888888888888e-05, - "loss": 1.7595, - "step": 10050 - }, - { - "epoch": 0.32192, - "grad_norm": 46.48634338378906, - "learning_rate": 1.8645333333333336e-05, - "loss": 1.7663, - "step": 10060 - }, - { - "epoch": 0.32224, - "grad_norm": 44.407623291015625, - "learning_rate": 1.8641777777777777e-05, - "loss": 1.7764, - "step": 10070 - }, - { - "epoch": 0.32256, - "grad_norm": 47.40081787109375, - "learning_rate": 1.8638222222222225e-05, - "loss": 1.7533, - "step": 10080 - }, - { - "epoch": 0.32288, - "grad_norm": 44.79414749145508, - "learning_rate": 1.8634666666666666e-05, - "loss": 1.7668, - "step": 10090 - }, - { - "epoch": 0.3232, - "grad_norm": 49.381526947021484, - "learning_rate": 1.8631111111111114e-05, - "loss": 1.7661, - "step": 10100 - }, - { - "epoch": 0.32352, - "grad_norm": 46.1864128112793, - "learning_rate": 1.8627555555555555e-05, - "loss": 1.7585, - "step": 10110 - }, - { - "epoch": 0.32384, - "grad_norm": 44.101497650146484, - "learning_rate": 1.8624000000000003e-05, - "loss": 1.7463, - "step": 10120 - }, - { - "epoch": 0.32416, - "grad_norm": 48.681636810302734, - "learning_rate": 1.8620444444444444e-05, - "loss": 1.7981, - "step": 10130 - }, - { - "epoch": 0.32448, - "grad_norm": 46.09796142578125, - "learning_rate": 1.8616888888888892e-05, - "loss": 1.7711, - "step": 10140 - }, - { - "epoch": 0.3248, - "grad_norm": 45.78500747680664, - "learning_rate": 1.8613333333333334e-05, - "loss": 1.7613, - "step": 10150 - }, - { - "epoch": 0.32512, - "grad_norm": 46.612937927246094, - "learning_rate": 1.860977777777778e-05, - "loss": 1.7646, - "step": 10160 - }, - { - "epoch": 0.32544, - "grad_norm": 46.29045867919922, - "learning_rate": 1.8606222222222223e-05, - "loss": 1.7676, - "step": 10170 - }, - { - "epoch": 0.32576, - "grad_norm": 47.204795837402344, - "learning_rate": 1.8602666666666667e-05, - "loss": 1.7667, - "step": 10180 - }, - { - "epoch": 0.32608, - "grad_norm": 49.51637268066406, - "learning_rate": 1.8599111111111112e-05, - "loss": 1.7439, - "step": 10190 - }, - { - "epoch": 0.3264, - "grad_norm": 46.27983856201172, - "learning_rate": 1.8595555555555556e-05, - "loss": 1.7464, - "step": 10200 - }, - { - "epoch": 0.32672, - "grad_norm": 50.15081024169922, - "learning_rate": 1.8592e-05, - "loss": 1.737, - "step": 10210 - }, - { - "epoch": 0.32704, - "grad_norm": 48.33657455444336, - "learning_rate": 1.8588444444444445e-05, - "loss": 1.7338, - "step": 10220 - }, - { - "epoch": 0.32736, - "grad_norm": 45.548072814941406, - "learning_rate": 1.858488888888889e-05, - "loss": 1.7796, - "step": 10230 - }, - { - "epoch": 0.32768, - "grad_norm": 46.754878997802734, - "learning_rate": 1.8581333333333335e-05, - "loss": 1.7664, - "step": 10240 - }, - { - "epoch": 0.328, - "grad_norm": 47.31142807006836, - "learning_rate": 1.857777777777778e-05, - "loss": 1.7729, - "step": 10250 - }, - { - "epoch": 0.32832, - "grad_norm": 48.030948638916016, - "learning_rate": 1.8574222222222224e-05, - "loss": 1.7665, - "step": 10260 - }, - { - "epoch": 0.32864, - "grad_norm": 47.46772003173828, - "learning_rate": 1.8570666666666668e-05, - "loss": 1.731, - "step": 10270 - }, - { - "epoch": 0.32896, - "grad_norm": 46.123939514160156, - 
"learning_rate": 1.8567111111111113e-05, - "loss": 1.7825, - "step": 10280 - }, - { - "epoch": 0.32928, - "grad_norm": 46.65377426147461, - "learning_rate": 1.8563555555555557e-05, - "loss": 1.7956, - "step": 10290 - }, - { - "epoch": 0.3296, - "grad_norm": 51.14271926879883, - "learning_rate": 1.8560000000000002e-05, - "loss": 1.7583, - "step": 10300 - }, - { - "epoch": 0.32992, - "grad_norm": 45.32281494140625, - "learning_rate": 1.8556444444444446e-05, - "loss": 1.7874, - "step": 10310 - }, - { - "epoch": 0.33024, - "grad_norm": 45.66364669799805, - "learning_rate": 1.855288888888889e-05, - "loss": 1.7917, - "step": 10320 - }, - { - "epoch": 0.33056, - "grad_norm": 47.25345993041992, - "learning_rate": 1.8549333333333336e-05, - "loss": 1.7249, - "step": 10330 - }, - { - "epoch": 0.33088, - "grad_norm": 45.0384635925293, - "learning_rate": 1.854577777777778e-05, - "loss": 1.7708, - "step": 10340 - }, - { - "epoch": 0.3312, - "grad_norm": 47.916831970214844, - "learning_rate": 1.8542222222222225e-05, - "loss": 1.7726, - "step": 10350 - }, - { - "epoch": 0.33152, - "grad_norm": 45.50624084472656, - "learning_rate": 1.853866666666667e-05, - "loss": 1.7704, - "step": 10360 - }, - { - "epoch": 0.33184, - "grad_norm": 49.995033264160156, - "learning_rate": 1.8535111111111114e-05, - "loss": 1.7448, - "step": 10370 - }, - { - "epoch": 0.33216, - "grad_norm": 43.8958625793457, - "learning_rate": 1.8531555555555555e-05, - "loss": 1.7621, - "step": 10380 - }, - { - "epoch": 0.33248, - "grad_norm": 47.6920280456543, - "learning_rate": 1.8528000000000003e-05, - "loss": 1.7757, - "step": 10390 - }, - { - "epoch": 0.3328, - "grad_norm": 46.501773834228516, - "learning_rate": 1.8524444444444444e-05, - "loss": 1.757, - "step": 10400 - }, - { - "epoch": 0.33312, - "grad_norm": 49.876014709472656, - "learning_rate": 1.8520888888888892e-05, - "loss": 1.7676, - "step": 10410 - }, - { - "epoch": 0.33344, - "grad_norm": 45.7028694152832, - "learning_rate": 1.8517333333333333e-05, - "loss": 1.7685, - "step": 10420 - }, - { - "epoch": 0.33376, - "grad_norm": 47.121124267578125, - "learning_rate": 1.851377777777778e-05, - "loss": 1.7713, - "step": 10430 - }, - { - "epoch": 0.33408, - "grad_norm": 43.50001525878906, - "learning_rate": 1.8510222222222222e-05, - "loss": 1.7629, - "step": 10440 - }, - { - "epoch": 0.3344, - "grad_norm": 45.10226821899414, - "learning_rate": 1.8506666666666667e-05, - "loss": 1.7892, - "step": 10450 - }, - { - "epoch": 0.33472, - "grad_norm": 45.64974594116211, - "learning_rate": 1.850311111111111e-05, - "loss": 1.7734, - "step": 10460 - }, - { - "epoch": 0.33504, - "grad_norm": 48.50352096557617, - "learning_rate": 1.8499555555555556e-05, - "loss": 1.7701, - "step": 10470 - }, - { - "epoch": 0.33536, - "grad_norm": 47.27094268798828, - "learning_rate": 1.8496e-05, - "loss": 1.7888, - "step": 10480 - }, - { - "epoch": 0.33568, - "grad_norm": 44.90556335449219, - "learning_rate": 1.8492444444444445e-05, - "loss": 1.7535, - "step": 10490 - }, - { - "epoch": 0.336, - "grad_norm": 47.575096130371094, - "learning_rate": 1.848888888888889e-05, - "loss": 1.7657, - "step": 10500 - }, - { - "epoch": 0.33632, - "grad_norm": 49.730674743652344, - "learning_rate": 1.8485333333333334e-05, - "loss": 1.7859, - "step": 10510 - }, - { - "epoch": 0.33664, - "grad_norm": 45.4715690612793, - "learning_rate": 1.848177777777778e-05, - "loss": 1.7721, - "step": 10520 - }, - { - "epoch": 0.33696, - "grad_norm": 45.48630905151367, - "learning_rate": 1.8478222222222223e-05, - "loss": 1.7347, - "step": 10530 - 
}, - { - "epoch": 0.33728, - "grad_norm": 46.90533447265625, - "learning_rate": 1.8474666666666668e-05, - "loss": 1.7278, - "step": 10540 - }, - { - "epoch": 0.3376, - "grad_norm": 47.98374557495117, - "learning_rate": 1.8471111111111112e-05, - "loss": 1.7522, - "step": 10550 - }, - { - "epoch": 0.33792, - "grad_norm": 45.95107650756836, - "learning_rate": 1.8467555555555557e-05, - "loss": 1.7476, - "step": 10560 - }, - { - "epoch": 0.33824, - "grad_norm": 47.6331672668457, - "learning_rate": 1.8464e-05, - "loss": 1.7487, - "step": 10570 - }, - { - "epoch": 0.33856, - "grad_norm": 44.778446197509766, - "learning_rate": 1.8460444444444446e-05, - "loss": 1.7472, - "step": 10580 - }, - { - "epoch": 0.33888, - "grad_norm": 47.06455993652344, - "learning_rate": 1.845688888888889e-05, - "loss": 1.7799, - "step": 10590 - }, - { - "epoch": 0.3392, - "grad_norm": 47.35928726196289, - "learning_rate": 1.8453333333333335e-05, - "loss": 1.7418, - "step": 10600 - }, - { - "epoch": 0.33952, - "grad_norm": 44.61867141723633, - "learning_rate": 1.844977777777778e-05, - "loss": 1.7388, - "step": 10610 - }, - { - "epoch": 0.33984, - "grad_norm": 45.826133728027344, - "learning_rate": 1.8446222222222224e-05, - "loss": 1.7485, - "step": 10620 - }, - { - "epoch": 0.34016, - "grad_norm": 48.199649810791016, - "learning_rate": 1.844266666666667e-05, - "loss": 1.7742, - "step": 10630 - }, - { - "epoch": 0.34048, - "grad_norm": 48.93040466308594, - "learning_rate": 1.8439111111111113e-05, - "loss": 1.7667, - "step": 10640 - }, - { - "epoch": 0.3408, - "grad_norm": 46.984500885009766, - "learning_rate": 1.8435555555555558e-05, - "loss": 1.7965, - "step": 10650 - }, - { - "epoch": 0.34112, - "grad_norm": 48.803184509277344, - "learning_rate": 1.8432000000000002e-05, - "loss": 1.7528, - "step": 10660 - }, - { - "epoch": 0.34144, - "grad_norm": 47.56668472290039, - "learning_rate": 1.8428444444444447e-05, - "loss": 1.7542, - "step": 10670 - }, - { - "epoch": 0.34176, - "grad_norm": 47.09225082397461, - "learning_rate": 1.842488888888889e-05, - "loss": 1.7825, - "step": 10680 - }, - { - "epoch": 0.34208, - "grad_norm": 47.98640060424805, - "learning_rate": 1.8421333333333336e-05, - "loss": 1.7399, - "step": 10690 - }, - { - "epoch": 0.3424, - "grad_norm": 45.85944366455078, - "learning_rate": 1.8417777777777777e-05, - "loss": 1.7566, - "step": 10700 - }, - { - "epoch": 0.34272, - "grad_norm": 47.46086120605469, - "learning_rate": 1.8414222222222225e-05, - "loss": 1.7667, - "step": 10710 - }, - { - "epoch": 0.34304, - "grad_norm": 46.52635192871094, - "learning_rate": 1.8410666666666666e-05, - "loss": 1.7813, - "step": 10720 - }, - { - "epoch": 0.34336, - "grad_norm": 44.20283126831055, - "learning_rate": 1.8407111111111114e-05, - "loss": 1.7366, - "step": 10730 - }, - { - "epoch": 0.34368, - "grad_norm": 47.95680618286133, - "learning_rate": 1.8403555555555555e-05, - "loss": 1.7492, - "step": 10740 - }, - { - "epoch": 0.344, - "grad_norm": 44.47868728637695, - "learning_rate": 1.8400000000000003e-05, - "loss": 1.7709, - "step": 10750 - }, - { - "epoch": 0.34432, - "grad_norm": 45.7765998840332, - "learning_rate": 1.8396444444444445e-05, - "loss": 1.7466, - "step": 10760 - }, - { - "epoch": 0.34464, - "grad_norm": 49.115234375, - "learning_rate": 1.8392888888888893e-05, - "loss": 1.7933, - "step": 10770 - }, - { - "epoch": 0.34496, - "grad_norm": 45.5890007019043, - "learning_rate": 1.8389333333333334e-05, - "loss": 1.7436, - "step": 10780 - }, - { - "epoch": 0.34528, - "grad_norm": 46.42338562011719, - "learning_rate": 
1.838577777777778e-05, - "loss": 1.7747, - "step": 10790 - }, - { - "epoch": 0.3456, - "grad_norm": 45.16661071777344, - "learning_rate": 1.8382222222222223e-05, - "loss": 1.7467, - "step": 10800 - }, - { - "epoch": 0.34592, - "grad_norm": 45.05027770996094, - "learning_rate": 1.837866666666667e-05, - "loss": 1.7571, - "step": 10810 - }, - { - "epoch": 0.34624, - "grad_norm": 44.80595397949219, - "learning_rate": 1.8375111111111112e-05, - "loss": 1.7643, - "step": 10820 - }, - { - "epoch": 0.34656, - "grad_norm": 44.93939971923828, - "learning_rate": 1.8371555555555556e-05, - "loss": 1.7336, - "step": 10830 - }, - { - "epoch": 0.34688, - "grad_norm": 46.136112213134766, - "learning_rate": 1.8368e-05, - "loss": 1.7521, - "step": 10840 - }, - { - "epoch": 0.3472, - "grad_norm": 45.64118957519531, - "learning_rate": 1.8364444444444446e-05, - "loss": 1.7721, - "step": 10850 - }, - { - "epoch": 0.34752, - "grad_norm": 47.01308822631836, - "learning_rate": 1.836088888888889e-05, - "loss": 1.7538, - "step": 10860 - }, - { - "epoch": 0.34784, - "grad_norm": 48.4968147277832, - "learning_rate": 1.8357333333333335e-05, - "loss": 1.7721, - "step": 10870 - }, - { - "epoch": 0.34816, - "grad_norm": 45.39475631713867, - "learning_rate": 1.835377777777778e-05, - "loss": 1.7198, - "step": 10880 - }, - { - "epoch": 0.34848, - "grad_norm": 48.02215576171875, - "learning_rate": 1.8350222222222224e-05, - "loss": 1.7364, - "step": 10890 - }, - { - "epoch": 0.3488, - "grad_norm": 45.59247970581055, - "learning_rate": 1.834666666666667e-05, - "loss": 1.7428, - "step": 10900 - }, - { - "epoch": 0.34912, - "grad_norm": 46.79558181762695, - "learning_rate": 1.8343111111111113e-05, - "loss": 1.7639, - "step": 10910 - }, - { - "epoch": 0.34944, - "grad_norm": 45.718727111816406, - "learning_rate": 1.8339555555555557e-05, - "loss": 1.7313, - "step": 10920 - }, - { - "epoch": 0.34976, - "grad_norm": 48.2978630065918, - "learning_rate": 1.8336000000000002e-05, - "loss": 1.7908, - "step": 10930 - }, - { - "epoch": 0.35008, - "grad_norm": 46.27851104736328, - "learning_rate": 1.8332444444444447e-05, - "loss": 1.7485, - "step": 10940 - }, - { - "epoch": 0.3504, - "grad_norm": 46.8973274230957, - "learning_rate": 1.832888888888889e-05, - "loss": 1.7244, - "step": 10950 - }, - { - "epoch": 0.35072, - "grad_norm": 45.576988220214844, - "learning_rate": 1.8325333333333336e-05, - "loss": 1.7575, - "step": 10960 - }, - { - "epoch": 0.35104, - "grad_norm": 45.916908264160156, - "learning_rate": 1.8321777777777777e-05, - "loss": 1.722, - "step": 10970 - }, - { - "epoch": 0.35136, - "grad_norm": 44.97768783569336, - "learning_rate": 1.8318222222222225e-05, - "loss": 1.7588, - "step": 10980 - }, - { - "epoch": 0.35168, - "grad_norm": 45.55659484863281, - "learning_rate": 1.8314666666666666e-05, - "loss": 1.7565, - "step": 10990 - }, - { - "epoch": 0.352, - "grad_norm": 45.32796096801758, - "learning_rate": 1.8311111111111114e-05, - "loss": 1.7627, - "step": 11000 - }, - { - "epoch": 0.35232, - "grad_norm": 46.5700798034668, - "learning_rate": 1.8307555555555555e-05, - "loss": 1.7332, - "step": 11010 - }, - { - "epoch": 0.35264, - "grad_norm": 46.67179489135742, - "learning_rate": 1.8304000000000003e-05, - "loss": 1.764, - "step": 11020 - }, - { - "epoch": 0.35296, - "grad_norm": 44.31849670410156, - "learning_rate": 1.8300444444444444e-05, - "loss": 1.7501, - "step": 11030 - }, - { - "epoch": 0.35328, - "grad_norm": 47.23154067993164, - "learning_rate": 1.8296888888888892e-05, - "loss": 1.7756, - "step": 11040 - }, - { - "epoch": 
0.3536, - "grad_norm": 45.82375717163086, - "learning_rate": 1.8293333333333333e-05, - "loss": 1.758, - "step": 11050 - }, - { - "epoch": 0.35392, - "grad_norm": 45.44552993774414, - "learning_rate": 1.828977777777778e-05, - "loss": 1.7307, - "step": 11060 - }, - { - "epoch": 0.35424, - "grad_norm": 44.66324234008789, - "learning_rate": 1.8286222222222222e-05, - "loss": 1.7543, - "step": 11070 - }, - { - "epoch": 0.35456, - "grad_norm": 45.85771560668945, - "learning_rate": 1.8282666666666667e-05, - "loss": 1.7505, - "step": 11080 - }, - { - "epoch": 0.35488, - "grad_norm": 44.97885513305664, - "learning_rate": 1.827911111111111e-05, - "loss": 1.7334, - "step": 11090 - }, - { - "epoch": 0.3552, - "grad_norm": 44.4973258972168, - "learning_rate": 1.8275555555555556e-05, - "loss": 1.7625, - "step": 11100 - }, - { - "epoch": 0.35552, - "grad_norm": 48.64956283569336, - "learning_rate": 1.8272e-05, - "loss": 1.7664, - "step": 11110 - }, - { - "epoch": 0.35584, - "grad_norm": 46.956947326660156, - "learning_rate": 1.8268444444444445e-05, - "loss": 1.7611, - "step": 11120 - }, - { - "epoch": 0.35616, - "grad_norm": 45.659523010253906, - "learning_rate": 1.826488888888889e-05, - "loss": 1.7577, - "step": 11130 - }, - { - "epoch": 0.35648, - "grad_norm": 46.591251373291016, - "learning_rate": 1.8261333333333334e-05, - "loss": 1.7543, - "step": 11140 - }, - { - "epoch": 0.3568, - "grad_norm": 44.3946533203125, - "learning_rate": 1.825777777777778e-05, - "loss": 1.7314, - "step": 11150 - }, - { - "epoch": 0.35712, - "grad_norm": 45.77732849121094, - "learning_rate": 1.8254222222222223e-05, - "loss": 1.7455, - "step": 11160 - }, - { - "epoch": 0.35744, - "grad_norm": 44.791587829589844, - "learning_rate": 1.8250666666666668e-05, - "loss": 1.7583, - "step": 11170 - }, - { - "epoch": 0.35776, - "grad_norm": 45.5029296875, - "learning_rate": 1.8247111111111112e-05, - "loss": 1.757, - "step": 11180 - }, - { - "epoch": 0.35808, - "grad_norm": 46.15699768066406, - "learning_rate": 1.8243555555555557e-05, - "loss": 1.7399, - "step": 11190 - }, - { - "epoch": 0.3584, - "grad_norm": 44.94160461425781, - "learning_rate": 1.824e-05, - "loss": 1.7639, - "step": 11200 - }, - { - "epoch": 0.35872, - "grad_norm": 45.93684005737305, - "learning_rate": 1.8236444444444446e-05, - "loss": 1.763, - "step": 11210 - }, - { - "epoch": 0.35904, - "grad_norm": 44.380393981933594, - "learning_rate": 1.823288888888889e-05, - "loss": 1.7609, - "step": 11220 - }, - { - "epoch": 0.35936, - "grad_norm": 46.544795989990234, - "learning_rate": 1.8229333333333335e-05, - "loss": 1.7309, - "step": 11230 - }, - { - "epoch": 0.35968, - "grad_norm": 45.34314727783203, - "learning_rate": 1.822577777777778e-05, - "loss": 1.7389, - "step": 11240 - }, - { - "epoch": 0.36, - "grad_norm": 47.537864685058594, - "learning_rate": 1.8222222222222224e-05, - "loss": 1.75, - "step": 11250 - }, - { - "epoch": 0.36032, - "grad_norm": 48.27071762084961, - "learning_rate": 1.821866666666667e-05, - "loss": 1.7758, - "step": 11260 - }, - { - "epoch": 0.36064, - "grad_norm": 50.97309875488281, - "learning_rate": 1.8215111111111113e-05, - "loss": 1.76, - "step": 11270 - }, - { - "epoch": 0.36096, - "grad_norm": 45.12947463989258, - "learning_rate": 1.8211555555555558e-05, - "loss": 1.7682, - "step": 11280 - }, - { - "epoch": 0.36128, - "grad_norm": 48.3563117980957, - "learning_rate": 1.8208000000000003e-05, - "loss": 1.7378, - "step": 11290 - }, - { - "epoch": 0.3616, - "grad_norm": 45.731014251708984, - "learning_rate": 1.8204444444444447e-05, - "loss": 
1.7396, - "step": 11300 - }, - { - "epoch": 0.36192, - "grad_norm": 45.1165771484375, - "learning_rate": 1.820088888888889e-05, - "loss": 1.7387, - "step": 11310 - }, - { - "epoch": 0.36224, - "grad_norm": 45.07907485961914, - "learning_rate": 1.8197333333333336e-05, - "loss": 1.8069, - "step": 11320 - }, - { - "epoch": 0.36256, - "grad_norm": 45.680091857910156, - "learning_rate": 1.819377777777778e-05, - "loss": 1.7286, - "step": 11330 - }, - { - "epoch": 0.36288, - "grad_norm": 47.89656066894531, - "learning_rate": 1.8190222222222225e-05, - "loss": 1.746, - "step": 11340 - }, - { - "epoch": 0.3632, - "grad_norm": 44.2681999206543, - "learning_rate": 1.8186666666666666e-05, - "loss": 1.7426, - "step": 11350 - }, - { - "epoch": 0.36352, - "grad_norm": 46.58122634887695, - "learning_rate": 1.8183111111111114e-05, - "loss": 1.7493, - "step": 11360 - }, - { - "epoch": 0.36384, - "grad_norm": 45.0501823425293, - "learning_rate": 1.8179555555555556e-05, - "loss": 1.7392, - "step": 11370 - }, - { - "epoch": 0.36416, - "grad_norm": 46.51576232910156, - "learning_rate": 1.8176000000000004e-05, - "loss": 1.7417, - "step": 11380 - }, - { - "epoch": 0.36448, - "grad_norm": 44.66267776489258, - "learning_rate": 1.8172444444444445e-05, - "loss": 1.7547, - "step": 11390 - }, - { - "epoch": 0.3648, - "grad_norm": 45.7799186706543, - "learning_rate": 1.8168888888888893e-05, - "loss": 1.7624, - "step": 11400 - }, - { - "epoch": 0.36512, - "grad_norm": 47.37014389038086, - "learning_rate": 1.8165333333333334e-05, - "loss": 1.767, - "step": 11410 - }, - { - "epoch": 0.36544, - "grad_norm": 47.13766098022461, - "learning_rate": 1.816177777777778e-05, - "loss": 1.7526, - "step": 11420 - }, - { - "epoch": 0.36576, - "grad_norm": 47.16769790649414, - "learning_rate": 1.8158222222222223e-05, - "loss": 1.7376, - "step": 11430 - }, - { - "epoch": 0.36608, - "grad_norm": 46.399654388427734, - "learning_rate": 1.8154666666666667e-05, - "loss": 1.75, - "step": 11440 - }, - { - "epoch": 0.3664, - "grad_norm": 46.89807891845703, - "learning_rate": 1.8151111111111112e-05, - "loss": 1.7607, - "step": 11450 - }, - { - "epoch": 0.36672, - "grad_norm": 46.25182342529297, - "learning_rate": 1.8147555555555557e-05, - "loss": 1.7196, - "step": 11460 - }, - { - "epoch": 0.36704, - "grad_norm": 45.322322845458984, - "learning_rate": 1.8144e-05, - "loss": 1.7141, - "step": 11470 - }, - { - "epoch": 0.36736, - "grad_norm": 46.26806640625, - "learning_rate": 1.8140444444444446e-05, - "loss": 1.7524, - "step": 11480 - }, - { - "epoch": 0.36768, - "grad_norm": 43.700660705566406, - "learning_rate": 1.813688888888889e-05, - "loss": 1.7206, - "step": 11490 - }, - { - "epoch": 0.368, - "grad_norm": 46.64167785644531, - "learning_rate": 1.8133333333333335e-05, - "loss": 1.7522, - "step": 11500 - }, - { - "epoch": 0.36832, - "grad_norm": 45.604671478271484, - "learning_rate": 1.812977777777778e-05, - "loss": 1.7551, - "step": 11510 - }, - { - "epoch": 0.36864, - "grad_norm": 46.66270446777344, - "learning_rate": 1.8126222222222224e-05, - "loss": 1.7218, - "step": 11520 - }, - { - "epoch": 0.36896, - "grad_norm": 47.54534149169922, - "learning_rate": 1.812266666666667e-05, - "loss": 1.7246, - "step": 11530 - }, - { - "epoch": 0.36928, - "grad_norm": 48.792396545410156, - "learning_rate": 1.8119111111111113e-05, - "loss": 1.7373, - "step": 11540 - }, - { - "epoch": 0.3696, - "grad_norm": 45.595306396484375, - "learning_rate": 1.8115555555555558e-05, - "loss": 1.7531, - "step": 11550 - }, - { - "epoch": 0.36992, - "grad_norm": 
45.06191635131836, - "learning_rate": 1.8112000000000002e-05, - "loss": 1.7465, - "step": 11560 - }, - { - "epoch": 0.37024, - "grad_norm": 47.39137649536133, - "learning_rate": 1.8108444444444447e-05, - "loss": 1.7441, - "step": 11570 - }, - { - "epoch": 0.37056, - "grad_norm": 44.804264068603516, - "learning_rate": 1.810488888888889e-05, - "loss": 1.7558, - "step": 11580 - }, - { - "epoch": 0.37088, - "grad_norm": 47.24115753173828, - "learning_rate": 1.8101333333333336e-05, - "loss": 1.735, - "step": 11590 - }, - { - "epoch": 0.3712, - "grad_norm": 47.79445266723633, - "learning_rate": 1.8097777777777777e-05, - "loss": 1.7455, - "step": 11600 - }, - { - "epoch": 0.37152, - "grad_norm": 45.69266891479492, - "learning_rate": 1.8094222222222225e-05, - "loss": 1.7525, - "step": 11610 - }, - { - "epoch": 0.37184, - "grad_norm": 46.884437561035156, - "learning_rate": 1.8090666666666666e-05, - "loss": 1.73, - "step": 11620 - }, - { - "epoch": 0.37216, - "grad_norm": 46.90225601196289, - "learning_rate": 1.8087111111111114e-05, - "loss": 1.7528, - "step": 11630 - }, - { - "epoch": 0.37248, - "grad_norm": 48.71936798095703, - "learning_rate": 1.8083555555555555e-05, - "loss": 1.7683, - "step": 11640 - }, - { - "epoch": 0.3728, - "grad_norm": 44.3066291809082, - "learning_rate": 1.8080000000000003e-05, - "loss": 1.7455, - "step": 11650 - }, - { - "epoch": 0.37312, - "grad_norm": 45.78877639770508, - "learning_rate": 1.8076444444444444e-05, - "loss": 1.78, - "step": 11660 - }, - { - "epoch": 0.37344, - "grad_norm": 48.4770622253418, - "learning_rate": 1.8072888888888892e-05, - "loss": 1.7724, - "step": 11670 - }, - { - "epoch": 0.37376, - "grad_norm": 44.937191009521484, - "learning_rate": 1.8069333333333333e-05, - "loss": 1.7467, - "step": 11680 - }, - { - "epoch": 0.37408, - "grad_norm": 45.83155059814453, - "learning_rate": 1.806577777777778e-05, - "loss": 1.7419, - "step": 11690 - }, - { - "epoch": 0.3744, - "grad_norm": 47.36248779296875, - "learning_rate": 1.8062222222222222e-05, - "loss": 1.7215, - "step": 11700 - }, - { - "epoch": 0.37472, - "grad_norm": 48.68720626831055, - "learning_rate": 1.805866666666667e-05, - "loss": 1.7201, - "step": 11710 - }, - { - "epoch": 0.37504, - "grad_norm": 44.902809143066406, - "learning_rate": 1.805511111111111e-05, - "loss": 1.7343, - "step": 11720 - }, - { - "epoch": 0.37536, - "grad_norm": 45.616302490234375, - "learning_rate": 1.8051555555555556e-05, - "loss": 1.7533, - "step": 11730 - }, - { - "epoch": 0.37568, - "grad_norm": 48.17421340942383, - "learning_rate": 1.8048e-05, - "loss": 1.7369, - "step": 11740 - }, - { - "epoch": 0.376, - "grad_norm": 46.45351791381836, - "learning_rate": 1.8044444444444445e-05, - "loss": 1.7455, - "step": 11750 - }, - { - "epoch": 0.37632, - "grad_norm": 47.1370849609375, - "learning_rate": 1.804088888888889e-05, - "loss": 1.7546, - "step": 11760 - }, - { - "epoch": 0.37664, - "grad_norm": 47.52964401245117, - "learning_rate": 1.8037333333333334e-05, - "loss": 1.7315, - "step": 11770 - }, - { - "epoch": 0.37696, - "grad_norm": 46.355072021484375, - "learning_rate": 1.803377777777778e-05, - "loss": 1.7806, - "step": 11780 - }, - { - "epoch": 0.37728, - "grad_norm": 47.648521423339844, - "learning_rate": 1.8030222222222223e-05, - "loss": 1.7521, - "step": 11790 - }, - { - "epoch": 0.3776, - "grad_norm": 46.333187103271484, - "learning_rate": 1.8026666666666668e-05, - "loss": 1.7419, - "step": 11800 - }, - { - "epoch": 0.37792, - "grad_norm": 47.49595260620117, - "learning_rate": 1.8023111111111113e-05, - "loss": 
1.7417, - "step": 11810 - }, - { - "epoch": 0.37824, - "grad_norm": 44.231197357177734, - "learning_rate": 1.8019555555555557e-05, - "loss": 1.7185, - "step": 11820 - }, - { - "epoch": 0.37856, - "grad_norm": 46.03631591796875, - "learning_rate": 1.8016e-05, - "loss": 1.7129, - "step": 11830 - }, - { - "epoch": 0.37888, - "grad_norm": 44.27299499511719, - "learning_rate": 1.8012444444444446e-05, - "loss": 1.752, - "step": 11840 - }, - { - "epoch": 0.3792, - "grad_norm": 45.1333122253418, - "learning_rate": 1.800888888888889e-05, - "loss": 1.7286, - "step": 11850 - }, - { - "epoch": 0.37952, - "grad_norm": 46.22859573364258, - "learning_rate": 1.8005333333333335e-05, - "loss": 1.7488, - "step": 11860 - }, - { - "epoch": 0.37984, - "grad_norm": 44.65610885620117, - "learning_rate": 1.800177777777778e-05, - "loss": 1.7543, - "step": 11870 - }, - { - "epoch": 0.38016, - "grad_norm": 45.86985778808594, - "learning_rate": 1.7998222222222224e-05, - "loss": 1.7556, - "step": 11880 - }, - { - "epoch": 0.38048, - "grad_norm": 45.231658935546875, - "learning_rate": 1.799466666666667e-05, - "loss": 1.7255, - "step": 11890 - }, - { - "epoch": 0.3808, - "grad_norm": 44.76812744140625, - "learning_rate": 1.7991111111111114e-05, - "loss": 1.7467, - "step": 11900 - }, - { - "epoch": 0.38112, - "grad_norm": 45.64585876464844, - "learning_rate": 1.7987555555555558e-05, - "loss": 1.7702, - "step": 11910 - }, - { - "epoch": 0.38144, - "grad_norm": 47.54214859008789, - "learning_rate": 1.7984000000000003e-05, - "loss": 1.7646, - "step": 11920 - }, - { - "epoch": 0.38176, - "grad_norm": 44.36042785644531, - "learning_rate": 1.7980444444444444e-05, - "loss": 1.7226, - "step": 11930 - }, - { - "epoch": 0.38208, - "grad_norm": 46.86466979980469, - "learning_rate": 1.7976888888888892e-05, - "loss": 1.7231, - "step": 11940 - }, - { - "epoch": 0.3824, - "grad_norm": 44.66880416870117, - "learning_rate": 1.7973333333333333e-05, - "loss": 1.7667, - "step": 11950 - }, - { - "epoch": 0.38272, - "grad_norm": 46.481754302978516, - "learning_rate": 1.796977777777778e-05, - "loss": 1.7423, - "step": 11960 - }, - { - "epoch": 0.38304, - "grad_norm": 47.537837982177734, - "learning_rate": 1.7966222222222222e-05, - "loss": 1.7087, - "step": 11970 - }, - { - "epoch": 0.38336, - "grad_norm": 50.210052490234375, - "learning_rate": 1.7962666666666667e-05, - "loss": 1.7374, - "step": 11980 - }, - { - "epoch": 0.38368, - "grad_norm": 45.741024017333984, - "learning_rate": 1.795911111111111e-05, - "loss": 1.7655, - "step": 11990 - }, - { - "epoch": 0.384, - "grad_norm": 45.897953033447266, - "learning_rate": 1.7955555555555556e-05, - "loss": 1.7656, - "step": 12000 - }, - { - "epoch": 0.38432, - "grad_norm": 45.622100830078125, - "learning_rate": 1.7952e-05, - "loss": 1.762, - "step": 12010 - }, - { - "epoch": 0.38464, - "grad_norm": 46.58403396606445, - "learning_rate": 1.7948444444444445e-05, - "loss": 1.7246, - "step": 12020 - }, - { - "epoch": 0.38496, - "grad_norm": 45.18146514892578, - "learning_rate": 1.794488888888889e-05, - "loss": 1.7601, - "step": 12030 - }, - { - "epoch": 0.38528, - "grad_norm": 45.685115814208984, - "learning_rate": 1.7941333333333334e-05, - "loss": 1.7703, - "step": 12040 - }, - { - "epoch": 0.3856, - "grad_norm": 45.952911376953125, - "learning_rate": 1.793777777777778e-05, - "loss": 1.7499, - "step": 12050 - }, - { - "epoch": 0.38592, - "grad_norm": 45.1624755859375, - "learning_rate": 1.7934222222222223e-05, - "loss": 1.767, - "step": 12060 - }, - { - "epoch": 0.38624, - "grad_norm": 46.44557571411133, 
- "learning_rate": 1.7930666666666668e-05, - "loss": 1.7551, - "step": 12070 - }, - { - "epoch": 0.38656, - "grad_norm": 46.18535232543945, - "learning_rate": 1.7927111111111112e-05, - "loss": 1.7062, - "step": 12080 - }, - { - "epoch": 0.38688, - "grad_norm": 46.5549430847168, - "learning_rate": 1.7923555555555557e-05, - "loss": 1.739, - "step": 12090 - }, - { - "epoch": 0.3872, - "grad_norm": 45.92256164550781, - "learning_rate": 1.792e-05, - "loss": 1.7616, - "step": 12100 - }, - { - "epoch": 0.38752, - "grad_norm": 47.67626953125, - "learning_rate": 1.7916444444444446e-05, - "loss": 1.7356, - "step": 12110 - }, - { - "epoch": 0.38784, - "grad_norm": 46.13505935668945, - "learning_rate": 1.791288888888889e-05, - "loss": 1.7371, - "step": 12120 - }, - { - "epoch": 0.38816, - "grad_norm": 46.67782974243164, - "learning_rate": 1.7909333333333335e-05, - "loss": 1.7342, - "step": 12130 - }, - { - "epoch": 0.38848, - "grad_norm": 45.36748123168945, - "learning_rate": 1.790577777777778e-05, - "loss": 1.7383, - "step": 12140 - }, - { - "epoch": 0.3888, - "grad_norm": 47.25735092163086, - "learning_rate": 1.7902222222222224e-05, - "loss": 1.7343, - "step": 12150 - }, - { - "epoch": 0.38912, - "grad_norm": 46.73722457885742, - "learning_rate": 1.789866666666667e-05, - "loss": 1.7394, - "step": 12160 - }, - { - "epoch": 0.38944, - "grad_norm": 45.669830322265625, - "learning_rate": 1.7895111111111113e-05, - "loss": 1.7294, - "step": 12170 - }, - { - "epoch": 0.38976, - "grad_norm": 43.606658935546875, - "learning_rate": 1.7891555555555558e-05, - "loss": 1.741, - "step": 12180 - }, - { - "epoch": 0.39008, - "grad_norm": 46.752349853515625, - "learning_rate": 1.7888000000000002e-05, - "loss": 1.7201, - "step": 12190 - }, - { - "epoch": 0.3904, - "grad_norm": 45.13085174560547, - "learning_rate": 1.7884444444444447e-05, - "loss": 1.6933, - "step": 12200 - }, - { - "epoch": 0.39072, - "grad_norm": 47.74692916870117, - "learning_rate": 1.788088888888889e-05, - "loss": 1.7298, - "step": 12210 - }, - { - "epoch": 0.39104, - "grad_norm": 47.35121536254883, - "learning_rate": 1.7877333333333336e-05, - "loss": 1.7401, - "step": 12220 - }, - { - "epoch": 0.39136, - "grad_norm": 47.107688903808594, - "learning_rate": 1.787377777777778e-05, - "loss": 1.736, - "step": 12230 - }, - { - "epoch": 0.39168, - "grad_norm": 46.06573486328125, - "learning_rate": 1.7870222222222225e-05, - "loss": 1.7439, - "step": 12240 - }, - { - "epoch": 0.392, - "grad_norm": 44.885154724121094, - "learning_rate": 1.7866666666666666e-05, - "loss": 1.7402, - "step": 12250 - }, - { - "epoch": 0.39232, - "grad_norm": 44.785648345947266, - "learning_rate": 1.7863111111111114e-05, - "loss": 1.7475, - "step": 12260 - }, - { - "epoch": 0.39264, - "grad_norm": 47.50531768798828, - "learning_rate": 1.7859555555555555e-05, - "loss": 1.7689, - "step": 12270 - }, - { - "epoch": 0.39296, - "grad_norm": 46.02947998046875, - "learning_rate": 1.7856000000000003e-05, - "loss": 1.7424, - "step": 12280 - }, - { - "epoch": 0.39328, - "grad_norm": 47.10367965698242, - "learning_rate": 1.7852444444444444e-05, - "loss": 1.7345, - "step": 12290 - }, - { - "epoch": 0.3936, - "grad_norm": 49.23219680786133, - "learning_rate": 1.7848888888888892e-05, - "loss": 1.7514, - "step": 12300 - }, - { - "epoch": 0.39392, - "grad_norm": 47.4083251953125, - "learning_rate": 1.7845333333333333e-05, - "loss": 1.751, - "step": 12310 - }, - { - "epoch": 0.39424, - "grad_norm": 47.23759078979492, - "learning_rate": 1.784177777777778e-05, - "loss": 1.7486, - "step": 12320 - }, 
- { - "epoch": 0.39456, - "grad_norm": 47.71400451660156, - "learning_rate": 1.7838222222222223e-05, - "loss": 1.7515, - "step": 12330 - }, - { - "epoch": 0.39488, - "grad_norm": 47.39936065673828, - "learning_rate": 1.783466666666667e-05, - "loss": 1.7542, - "step": 12340 - }, - { - "epoch": 0.3952, - "grad_norm": 47.184940338134766, - "learning_rate": 1.783111111111111e-05, - "loss": 1.7303, - "step": 12350 - }, - { - "epoch": 0.39552, - "grad_norm": 43.96541976928711, - "learning_rate": 1.782755555555556e-05, - "loss": 1.7178, - "step": 12360 - }, - { - "epoch": 0.39584, - "grad_norm": 48.6561393737793, - "learning_rate": 1.7824e-05, - "loss": 1.7481, - "step": 12370 - }, - { - "epoch": 0.39616, - "grad_norm": 45.21891784667969, - "learning_rate": 1.7820444444444445e-05, - "loss": 1.7518, - "step": 12380 - }, - { - "epoch": 0.39648, - "grad_norm": 44.285064697265625, - "learning_rate": 1.781688888888889e-05, - "loss": 1.7334, - "step": 12390 - }, - { - "epoch": 0.3968, - "grad_norm": 45.4031867980957, - "learning_rate": 1.7813333333333334e-05, - "loss": 1.7392, - "step": 12400 - }, - { - "epoch": 0.39712, - "grad_norm": 46.44404983520508, - "learning_rate": 1.780977777777778e-05, - "loss": 1.7558, - "step": 12410 - }, - { - "epoch": 0.39744, - "grad_norm": 46.931880950927734, - "learning_rate": 1.7806222222222224e-05, - "loss": 1.757, - "step": 12420 - }, - { - "epoch": 0.39776, - "grad_norm": 45.4690055847168, - "learning_rate": 1.7802666666666668e-05, - "loss": 1.7441, - "step": 12430 - }, - { - "epoch": 0.39808, - "grad_norm": 47.687198638916016, - "learning_rate": 1.7799111111111113e-05, - "loss": 1.7388, - "step": 12440 - }, - { - "epoch": 0.3984, - "grad_norm": 45.4809455871582, - "learning_rate": 1.7795555555555557e-05, - "loss": 1.7613, - "step": 12450 - }, - { - "epoch": 0.39872, - "grad_norm": 44.98910140991211, - "learning_rate": 1.7792000000000002e-05, - "loss": 1.7376, - "step": 12460 - }, - { - "epoch": 0.39904, - "grad_norm": 45.352169036865234, - "learning_rate": 1.7788444444444446e-05, - "loss": 1.7192, - "step": 12470 - }, - { - "epoch": 0.39936, - "grad_norm": 45.38097381591797, - "learning_rate": 1.778488888888889e-05, - "loss": 1.7749, - "step": 12480 - }, - { - "epoch": 0.39968, - "grad_norm": 47.00810241699219, - "learning_rate": 1.7781333333333335e-05, - "loss": 1.7183, - "step": 12490 - }, - { - "epoch": 0.4, - "grad_norm": 46.29263687133789, - "learning_rate": 1.7777777777777777e-05, - "loss": 1.6969, - "step": 12500 - }, - { - "epoch": 0.40032, - "grad_norm": 46.169193267822266, - "learning_rate": 1.7774222222222225e-05, - "loss": 1.7214, - "step": 12510 - }, - { - "epoch": 0.40064, - "grad_norm": 47.48134231567383, - "learning_rate": 1.7770666666666666e-05, - "loss": 1.7373, - "step": 12520 - }, - { - "epoch": 0.40096, - "grad_norm": 46.51030731201172, - "learning_rate": 1.7767111111111114e-05, - "loss": 1.734, - "step": 12530 - }, - { - "epoch": 0.40128, - "grad_norm": 46.44158172607422, - "learning_rate": 1.7763555555555555e-05, - "loss": 1.7533, - "step": 12540 - }, - { - "epoch": 0.4016, - "grad_norm": 44.81826400756836, - "learning_rate": 1.7760000000000003e-05, - "loss": 1.7474, - "step": 12550 - }, - { - "epoch": 0.40192, - "grad_norm": 47.05168533325195, - "learning_rate": 1.7756444444444444e-05, - "loss": 1.7421, - "step": 12560 - }, - { - "epoch": 0.40224, - "grad_norm": 45.72329330444336, - "learning_rate": 1.7752888888888892e-05, - "loss": 1.7243, - "step": 12570 - }, - { - "epoch": 0.40256, - "grad_norm": 48.39426040649414, - "learning_rate": 
1.7749333333333333e-05, - "loss": 1.7181, - "step": 12580 - }, - { - "epoch": 0.40288, - "grad_norm": 45.663997650146484, - "learning_rate": 1.774577777777778e-05, - "loss": 1.7138, - "step": 12590 - }, - { - "epoch": 0.4032, - "grad_norm": 46.39791488647461, - "learning_rate": 1.7742222222222222e-05, - "loss": 1.7452, - "step": 12600 - }, - { - "epoch": 0.40352, - "grad_norm": 45.58061218261719, - "learning_rate": 1.773866666666667e-05, - "loss": 1.7456, - "step": 12610 - }, - { - "epoch": 0.40384, - "grad_norm": 47.24848175048828, - "learning_rate": 1.773511111111111e-05, - "loss": 1.7502, - "step": 12620 - }, - { - "epoch": 0.40416, - "grad_norm": 45.21625900268555, - "learning_rate": 1.7731555555555556e-05, - "loss": 1.7408, - "step": 12630 - }, - { - "epoch": 0.40448, - "grad_norm": 43.57041931152344, - "learning_rate": 1.7728e-05, - "loss": 1.7407, - "step": 12640 - }, - { - "epoch": 0.4048, - "grad_norm": 45.896942138671875, - "learning_rate": 1.7724444444444445e-05, - "loss": 1.7327, - "step": 12650 - }, - { - "epoch": 0.40512, - "grad_norm": 45.211212158203125, - "learning_rate": 1.772088888888889e-05, - "loss": 1.7506, - "step": 12660 - }, - { - "epoch": 0.40544, - "grad_norm": 45.28093719482422, - "learning_rate": 1.7717333333333334e-05, - "loss": 1.7308, - "step": 12670 - }, - { - "epoch": 0.40576, - "grad_norm": 44.34235382080078, - "learning_rate": 1.771377777777778e-05, - "loss": 1.7177, - "step": 12680 - }, - { - "epoch": 0.40608, - "grad_norm": 44.18003463745117, - "learning_rate": 1.7710222222222223e-05, - "loss": 1.7427, - "step": 12690 - }, - { - "epoch": 0.4064, - "grad_norm": 45.78373336791992, - "learning_rate": 1.7706666666666668e-05, - "loss": 1.736, - "step": 12700 - }, - { - "epoch": 0.40672, - "grad_norm": 47.60939025878906, - "learning_rate": 1.7703111111111112e-05, - "loss": 1.7644, - "step": 12710 - }, - { - "epoch": 0.40704, - "grad_norm": 47.69173812866211, - "learning_rate": 1.7699555555555557e-05, - "loss": 1.7165, - "step": 12720 - }, - { - "epoch": 0.40736, - "grad_norm": 47.19498825073242, - "learning_rate": 1.7696e-05, - "loss": 1.7511, - "step": 12730 - }, - { - "epoch": 0.40768, - "grad_norm": 45.9237060546875, - "learning_rate": 1.7692444444444446e-05, - "loss": 1.7394, - "step": 12740 - }, - { - "epoch": 0.408, - "grad_norm": 48.24811553955078, - "learning_rate": 1.768888888888889e-05, - "loss": 1.7615, - "step": 12750 - }, - { - "epoch": 0.40832, - "grad_norm": 46.84287643432617, - "learning_rate": 1.7685333333333335e-05, - "loss": 1.7241, - "step": 12760 - }, - { - "epoch": 0.40864, - "grad_norm": 44.28770065307617, - "learning_rate": 1.768177777777778e-05, - "loss": 1.7564, - "step": 12770 - }, - { - "epoch": 0.40896, - "grad_norm": 46.839080810546875, - "learning_rate": 1.7678222222222224e-05, - "loss": 1.7411, - "step": 12780 - }, - { - "epoch": 0.40928, - "grad_norm": 47.01158905029297, - "learning_rate": 1.767466666666667e-05, - "loss": 1.7154, - "step": 12790 - }, - { - "epoch": 0.4096, - "grad_norm": 47.18655776977539, - "learning_rate": 1.7671111111111113e-05, - "loss": 1.7384, - "step": 12800 - }, - { - "epoch": 0.40992, - "grad_norm": 43.830631256103516, - "learning_rate": 1.7667555555555558e-05, - "loss": 1.7559, - "step": 12810 - }, - { - "epoch": 0.41024, - "grad_norm": 46.170230865478516, - "learning_rate": 1.7664000000000002e-05, - "loss": 1.7664, - "step": 12820 - }, - { - "epoch": 0.41056, - "grad_norm": 44.55336380004883, - "learning_rate": 1.7660444444444447e-05, - "loss": 1.746, - "step": 12830 - }, - { - "epoch": 0.41088, - 
"grad_norm": 44.43898010253906, - "learning_rate": 1.765688888888889e-05, - "loss": 1.7448, - "step": 12840 - }, - { - "epoch": 0.4112, - "grad_norm": 46.56974411010742, - "learning_rate": 1.7653333333333336e-05, - "loss": 1.7177, - "step": 12850 - }, - { - "epoch": 0.41152, - "grad_norm": 45.02238845825195, - "learning_rate": 1.764977777777778e-05, - "loss": 1.7224, - "step": 12860 - }, - { - "epoch": 0.41184, - "grad_norm": 43.810420989990234, - "learning_rate": 1.7646222222222225e-05, - "loss": 1.7423, - "step": 12870 - }, - { - "epoch": 0.41216, - "grad_norm": 45.10750961303711, - "learning_rate": 1.7642666666666666e-05, - "loss": 1.7277, - "step": 12880 - }, - { - "epoch": 0.41248, - "grad_norm": 44.9813232421875, - "learning_rate": 1.7639111111111114e-05, - "loss": 1.7396, - "step": 12890 - }, - { - "epoch": 0.4128, - "grad_norm": 43.90498352050781, - "learning_rate": 1.7635555555555555e-05, - "loss": 1.7491, - "step": 12900 - }, - { - "epoch": 0.41312, - "grad_norm": 45.96403503417969, - "learning_rate": 1.7632000000000003e-05, - "loss": 1.7272, - "step": 12910 - }, - { - "epoch": 0.41344, - "grad_norm": 48.598793029785156, - "learning_rate": 1.7628444444444444e-05, - "loss": 1.7497, - "step": 12920 - }, - { - "epoch": 0.41376, - "grad_norm": 47.302711486816406, - "learning_rate": 1.7624888888888892e-05, - "loss": 1.7199, - "step": 12930 - }, - { - "epoch": 0.41408, - "grad_norm": 44.98503112792969, - "learning_rate": 1.7621333333333334e-05, - "loss": 1.717, - "step": 12940 - }, - { - "epoch": 0.4144, - "grad_norm": 45.62991714477539, - "learning_rate": 1.761777777777778e-05, - "loss": 1.7229, - "step": 12950 - }, - { - "epoch": 0.41472, - "grad_norm": 47.17755126953125, - "learning_rate": 1.7614222222222223e-05, - "loss": 1.7291, - "step": 12960 - }, - { - "epoch": 0.41504, - "grad_norm": 47.77396011352539, - "learning_rate": 1.7610666666666667e-05, - "loss": 1.7388, - "step": 12970 - }, - { - "epoch": 0.41536, - "grad_norm": 45.62364196777344, - "learning_rate": 1.7607111111111112e-05, - "loss": 1.7444, - "step": 12980 - }, - { - "epoch": 0.41568, - "grad_norm": 46.032386779785156, - "learning_rate": 1.7603555555555556e-05, - "loss": 1.7229, - "step": 12990 - }, - { - "epoch": 0.416, - "grad_norm": 44.89250946044922, - "learning_rate": 1.76e-05, - "loss": 1.748, - "step": 13000 - }, - { - "epoch": 0.41632, - "grad_norm": 45.50238800048828, - "learning_rate": 1.7596444444444445e-05, - "loss": 1.7233, - "step": 13010 - }, - { - "epoch": 0.41664, - "grad_norm": 44.66646194458008, - "learning_rate": 1.759288888888889e-05, - "loss": 1.7281, - "step": 13020 - }, - { - "epoch": 0.41696, - "grad_norm": 45.436370849609375, - "learning_rate": 1.7589333333333335e-05, - "loss": 1.7203, - "step": 13030 - }, - { - "epoch": 0.41728, - "grad_norm": 44.28810119628906, - "learning_rate": 1.758577777777778e-05, - "loss": 1.7491, - "step": 13040 - }, - { - "epoch": 0.4176, - "grad_norm": 45.3896369934082, - "learning_rate": 1.7582222222222224e-05, - "loss": 1.7098, - "step": 13050 - }, - { - "epoch": 0.41792, - "grad_norm": 46.0196533203125, - "learning_rate": 1.7578666666666668e-05, - "loss": 1.7428, - "step": 13060 - }, - { - "epoch": 0.41824, - "grad_norm": 48.799415588378906, - "learning_rate": 1.7575111111111113e-05, - "loss": 1.7505, - "step": 13070 - }, - { - "epoch": 0.41856, - "grad_norm": 46.70732116699219, - "learning_rate": 1.7571555555555557e-05, - "loss": 1.7166, - "step": 13080 - }, - { - "epoch": 0.41888, - "grad_norm": 43.49319839477539, - "learning_rate": 1.7568000000000002e-05, - 
"loss": 1.7557, - "step": 13090 - }, - { - "epoch": 0.4192, - "grad_norm": 46.664398193359375, - "learning_rate": 1.7564444444444446e-05, - "loss": 1.7581, - "step": 13100 - }, - { - "epoch": 0.41952, - "grad_norm": 48.30585479736328, - "learning_rate": 1.756088888888889e-05, - "loss": 1.7517, - "step": 13110 - }, - { - "epoch": 0.41984, - "grad_norm": 45.69898223876953, - "learning_rate": 1.7557333333333336e-05, - "loss": 1.7352, - "step": 13120 - }, - { - "epoch": 0.42016, - "grad_norm": 44.632633209228516, - "learning_rate": 1.755377777777778e-05, - "loss": 1.7386, - "step": 13130 - }, - { - "epoch": 0.42048, - "grad_norm": 46.91196060180664, - "learning_rate": 1.7550222222222225e-05, - "loss": 1.729, - "step": 13140 - }, - { - "epoch": 0.4208, - "grad_norm": 45.67945861816406, - "learning_rate": 1.7546666666666666e-05, - "loss": 1.7209, - "step": 13150 - }, - { - "epoch": 0.42112, - "grad_norm": 46.89979934692383, - "learning_rate": 1.7543111111111114e-05, - "loss": 1.7585, - "step": 13160 - }, - { - "epoch": 0.42144, - "grad_norm": 47.15142059326172, - "learning_rate": 1.7539555555555555e-05, - "loss": 1.7552, - "step": 13170 - }, - { - "epoch": 0.42176, - "grad_norm": 46.78049850463867, - "learning_rate": 1.7536000000000003e-05, - "loss": 1.7493, - "step": 13180 - }, - { - "epoch": 0.42208, - "grad_norm": 45.9544563293457, - "learning_rate": 1.7532444444444444e-05, - "loss": 1.7206, - "step": 13190 - }, - { - "epoch": 0.4224, - "grad_norm": 45.94736099243164, - "learning_rate": 1.7528888888888892e-05, - "loss": 1.7026, - "step": 13200 - }, - { - "epoch": 0.42272, - "grad_norm": 44.32487106323242, - "learning_rate": 1.7525333333333333e-05, - "loss": 1.7201, - "step": 13210 - }, - { - "epoch": 0.42304, - "grad_norm": 45.86711883544922, - "learning_rate": 1.752177777777778e-05, - "loss": 1.7331, - "step": 13220 - }, - { - "epoch": 0.42336, - "grad_norm": 44.53461837768555, - "learning_rate": 1.7518222222222222e-05, - "loss": 1.7525, - "step": 13230 - }, - { - "epoch": 0.42368, - "grad_norm": 44.8212776184082, - "learning_rate": 1.751466666666667e-05, - "loss": 1.758, - "step": 13240 - }, - { - "epoch": 0.424, - "grad_norm": 45.448646545410156, - "learning_rate": 1.751111111111111e-05, - "loss": 1.7499, - "step": 13250 - }, - { - "epoch": 0.42432, - "grad_norm": 48.57109069824219, - "learning_rate": 1.750755555555556e-05, - "loss": 1.7272, - "step": 13260 - }, - { - "epoch": 0.42464, - "grad_norm": 44.58144760131836, - "learning_rate": 1.7504e-05, - "loss": 1.7274, - "step": 13270 - }, - { - "epoch": 0.42496, - "grad_norm": 44.63924026489258, - "learning_rate": 1.7500444444444445e-05, - "loss": 1.7055, - "step": 13280 - }, - { - "epoch": 0.42528, - "grad_norm": 46.89718246459961, - "learning_rate": 1.749688888888889e-05, - "loss": 1.7027, - "step": 13290 - }, - { - "epoch": 0.4256, - "grad_norm": 43.74995422363281, - "learning_rate": 1.7493333333333334e-05, - "loss": 1.7279, - "step": 13300 - }, - { - "epoch": 0.42592, - "grad_norm": 44.514400482177734, - "learning_rate": 1.748977777777778e-05, - "loss": 1.7189, - "step": 13310 - }, - { - "epoch": 0.42624, - "grad_norm": 45.729801177978516, - "learning_rate": 1.7486222222222223e-05, - "loss": 1.7564, - "step": 13320 - }, - { - "epoch": 0.42656, - "grad_norm": 46.24195861816406, - "learning_rate": 1.7482666666666668e-05, - "loss": 1.7214, - "step": 13330 - }, - { - "epoch": 0.42688, - "grad_norm": 43.434078216552734, - "learning_rate": 1.7479111111111112e-05, - "loss": 1.7219, - "step": 13340 - }, - { - "epoch": 0.4272, - "grad_norm": 
46.25753402709961, - "learning_rate": 1.7475555555555557e-05, - "loss": 1.7537, - "step": 13350 - }, - { - "epoch": 0.42752, - "grad_norm": 46.87751388549805, - "learning_rate": 1.7472e-05, - "loss": 1.7413, - "step": 13360 - }, - { - "epoch": 0.42784, - "grad_norm": 45.92871856689453, - "learning_rate": 1.7468444444444446e-05, - "loss": 1.7182, - "step": 13370 - }, - { - "epoch": 0.42816, - "grad_norm": 45.935028076171875, - "learning_rate": 1.746488888888889e-05, - "loss": 1.7382, - "step": 13380 - }, - { - "epoch": 0.42848, - "grad_norm": 49.45673370361328, - "learning_rate": 1.7461333333333335e-05, - "loss": 1.7248, - "step": 13390 - }, - { - "epoch": 0.4288, - "grad_norm": 46.06485366821289, - "learning_rate": 1.745777777777778e-05, - "loss": 1.7203, - "step": 13400 - }, - { - "epoch": 0.42912, - "grad_norm": 44.7425537109375, - "learning_rate": 1.7454222222222224e-05, - "loss": 1.741, - "step": 13410 - }, - { - "epoch": 0.42944, - "grad_norm": 45.23655700683594, - "learning_rate": 1.745066666666667e-05, - "loss": 1.7522, - "step": 13420 - }, - { - "epoch": 0.42976, - "grad_norm": 44.002384185791016, - "learning_rate": 1.7447111111111113e-05, - "loss": 1.7217, - "step": 13430 - }, - { - "epoch": 0.43008, - "grad_norm": 46.94925308227539, - "learning_rate": 1.7443555555555558e-05, - "loss": 1.7299, - "step": 13440 - }, - { - "epoch": 0.4304, - "grad_norm": 45.58026123046875, - "learning_rate": 1.7440000000000002e-05, - "loss": 1.7075, - "step": 13450 - }, - { - "epoch": 0.43072, - "grad_norm": 52.205322265625, - "learning_rate": 1.7436444444444447e-05, - "loss": 1.732, - "step": 13460 - }, - { - "epoch": 0.43104, - "grad_norm": 42.87519836425781, - "learning_rate": 1.743288888888889e-05, - "loss": 1.7388, - "step": 13470 - }, - { - "epoch": 0.43136, - "grad_norm": 45.98421096801758, - "learning_rate": 1.7429333333333333e-05, - "loss": 1.7596, - "step": 13480 - }, - { - "epoch": 0.43168, - "grad_norm": 45.058380126953125, - "learning_rate": 1.742577777777778e-05, - "loss": 1.7358, - "step": 13490 - }, - { - "epoch": 0.432, - "grad_norm": 45.228031158447266, - "learning_rate": 1.7422222222222222e-05, - "loss": 1.7616, - "step": 13500 - }, - { - "epoch": 0.43232, - "grad_norm": 45.51875305175781, - "learning_rate": 1.741866666666667e-05, - "loss": 1.7531, - "step": 13510 - }, - { - "epoch": 0.43264, - "grad_norm": 46.320167541503906, - "learning_rate": 1.741511111111111e-05, - "loss": 1.7092, - "step": 13520 - }, - { - "epoch": 0.43296, - "grad_norm": 44.57635498046875, - "learning_rate": 1.7411555555555555e-05, - "loss": 1.7187, - "step": 13530 - }, - { - "epoch": 0.43328, - "grad_norm": 44.553035736083984, - "learning_rate": 1.7408e-05, - "loss": 1.7103, - "step": 13540 - }, - { - "epoch": 0.4336, - "grad_norm": 45.63328170776367, - "learning_rate": 1.7404444444444445e-05, - "loss": 1.7485, - "step": 13550 - }, - { - "epoch": 0.43392, - "grad_norm": 46.18551254272461, - "learning_rate": 1.740088888888889e-05, - "loss": 1.7419, - "step": 13560 - }, - { - "epoch": 0.43424, - "grad_norm": 44.467525482177734, - "learning_rate": 1.7397333333333334e-05, - "loss": 1.7, - "step": 13570 - }, - { - "epoch": 0.43456, - "grad_norm": 46.38378143310547, - "learning_rate": 1.7393777777777778e-05, - "loss": 1.7244, - "step": 13580 - }, - { - "epoch": 0.43488, - "grad_norm": 45.0339469909668, - "learning_rate": 1.7390222222222223e-05, - "loss": 1.7539, - "step": 13590 - }, - { - "epoch": 0.4352, - "grad_norm": 44.02116394042969, - "learning_rate": 1.7386666666666667e-05, - "loss": 1.7397, - "step": 
13600 - }, - { - "epoch": 0.43552, - "grad_norm": 49.48076248168945, - "learning_rate": 1.7383111111111112e-05, - "loss": 1.7492, - "step": 13610 - }, - { - "epoch": 0.43584, - "grad_norm": 45.204132080078125, - "learning_rate": 1.7379555555555556e-05, - "loss": 1.7168, - "step": 13620 - }, - { - "epoch": 0.43616, - "grad_norm": 46.511878967285156, - "learning_rate": 1.7376e-05, - "loss": 1.7473, - "step": 13630 - }, - { - "epoch": 0.43648, - "grad_norm": 45.37043762207031, - "learning_rate": 1.7372444444444446e-05, - "loss": 1.7164, - "step": 13640 - }, - { - "epoch": 0.4368, - "grad_norm": 44.92662048339844, - "learning_rate": 1.736888888888889e-05, - "loss": 1.7352, - "step": 13650 - }, - { - "epoch": 0.43712, - "grad_norm": 45.94477844238281, - "learning_rate": 1.7365333333333335e-05, - "loss": 1.7325, - "step": 13660 - }, - { - "epoch": 0.43744, - "grad_norm": 44.44512176513672, - "learning_rate": 1.736177777777778e-05, - "loss": 1.7126, - "step": 13670 - }, - { - "epoch": 0.43776, - "grad_norm": 44.867652893066406, - "learning_rate": 1.7358222222222224e-05, - "loss": 1.7423, - "step": 13680 - }, - { - "epoch": 0.43808, - "grad_norm": 42.79388427734375, - "learning_rate": 1.735466666666667e-05, - "loss": 1.7241, - "step": 13690 - }, - { - "epoch": 0.4384, - "grad_norm": 45.237342834472656, - "learning_rate": 1.7351111111111113e-05, - "loss": 1.7428, - "step": 13700 - }, - { - "epoch": 0.43872, - "grad_norm": 45.06656265258789, - "learning_rate": 1.7347555555555557e-05, - "loss": 1.7279, - "step": 13710 - }, - { - "epoch": 0.43904, - "grad_norm": 44.70061111450195, - "learning_rate": 1.7344000000000002e-05, - "loss": 1.7414, - "step": 13720 - }, - { - "epoch": 0.43936, - "grad_norm": 43.81660842895508, - "learning_rate": 1.7340444444444447e-05, - "loss": 1.7378, - "step": 13730 - }, - { - "epoch": 0.43968, - "grad_norm": 45.633880615234375, - "learning_rate": 1.733688888888889e-05, - "loss": 1.7447, - "step": 13740 - }, - { - "epoch": 0.44, - "grad_norm": 46.27029800415039, - "learning_rate": 1.7333333333333336e-05, - "loss": 1.7176, - "step": 13750 - }, - { - "epoch": 0.44032, - "grad_norm": 47.65945053100586, - "learning_rate": 1.732977777777778e-05, - "loss": 1.7371, - "step": 13760 - }, - { - "epoch": 0.44064, - "grad_norm": 45.97589111328125, - "learning_rate": 1.7326222222222225e-05, - "loss": 1.6964, - "step": 13770 - }, - { - "epoch": 0.44096, - "grad_norm": 44.704036712646484, - "learning_rate": 1.732266666666667e-05, - "loss": 1.7066, - "step": 13780 - }, - { - "epoch": 0.44128, - "grad_norm": 46.602542877197266, - "learning_rate": 1.7319111111111114e-05, - "loss": 1.7192, - "step": 13790 - }, - { - "epoch": 0.4416, - "grad_norm": 46.97726821899414, - "learning_rate": 1.7315555555555555e-05, - "loss": 1.7008, - "step": 13800 - }, - { - "epoch": 0.44192, - "grad_norm": 46.14274978637695, - "learning_rate": 1.7312000000000003e-05, - "loss": 1.7701, - "step": 13810 - }, - { - "epoch": 0.44224, - "grad_norm": 45.656639099121094, - "learning_rate": 1.7308444444444444e-05, - "loss": 1.7404, - "step": 13820 - }, - { - "epoch": 0.44256, - "grad_norm": 45.84688949584961, - "learning_rate": 1.7304888888888892e-05, - "loss": 1.7281, - "step": 13830 - }, - { - "epoch": 0.44288, - "grad_norm": 44.40279006958008, - "learning_rate": 1.7301333333333333e-05, - "loss": 1.7412, - "step": 13840 - }, - { - "epoch": 0.4432, - "grad_norm": 44.84839630126953, - "learning_rate": 1.729777777777778e-05, - "loss": 1.7568, - "step": 13850 - }, - { - "epoch": 0.44352, - "grad_norm": 45.91680145263672, - 
"learning_rate": 1.7294222222222222e-05, - "loss": 1.7247, - "step": 13860 - }, - { - "epoch": 0.44384, - "grad_norm": 43.44932556152344, - "learning_rate": 1.729066666666667e-05, - "loss": 1.7638, - "step": 13870 - }, - { - "epoch": 0.44416, - "grad_norm": 45.238861083984375, - "learning_rate": 1.728711111111111e-05, - "loss": 1.7215, - "step": 13880 - }, - { - "epoch": 0.44448, - "grad_norm": 45.49114990234375, - "learning_rate": 1.728355555555556e-05, - "loss": 1.7085, - "step": 13890 - }, - { - "epoch": 0.4448, - "grad_norm": 44.86478805541992, - "learning_rate": 1.728e-05, - "loss": 1.713, - "step": 13900 - }, - { - "epoch": 0.44512, - "grad_norm": 45.341129302978516, - "learning_rate": 1.7276444444444445e-05, - "loss": 1.7263, - "step": 13910 - }, - { - "epoch": 0.44544, - "grad_norm": 44.82181167602539, - "learning_rate": 1.727288888888889e-05, - "loss": 1.7458, - "step": 13920 - }, - { - "epoch": 0.44576, - "grad_norm": 42.75132751464844, - "learning_rate": 1.7269333333333334e-05, - "loss": 1.7263, - "step": 13930 - }, - { - "epoch": 0.44608, - "grad_norm": 46.062068939208984, - "learning_rate": 1.726577777777778e-05, - "loss": 1.7592, - "step": 13940 - }, - { - "epoch": 0.4464, - "grad_norm": 43.95463562011719, - "learning_rate": 1.7262222222222223e-05, - "loss": 1.7476, - "step": 13950 - }, - { - "epoch": 0.44672, - "grad_norm": 44.0671272277832, - "learning_rate": 1.7258666666666668e-05, - "loss": 1.7189, - "step": 13960 - }, - { - "epoch": 0.44704, - "grad_norm": 46.24520492553711, - "learning_rate": 1.7255111111111112e-05, - "loss": 1.7354, - "step": 13970 - }, - { - "epoch": 0.44736, - "grad_norm": 43.28176498413086, - "learning_rate": 1.7251555555555557e-05, - "loss": 1.6807, - "step": 13980 - }, - { - "epoch": 0.44768, - "grad_norm": 44.17888259887695, - "learning_rate": 1.7248e-05, - "loss": 1.7, - "step": 13990 - }, - { - "epoch": 0.448, - "grad_norm": 45.7882080078125, - "learning_rate": 1.7244444444444446e-05, - "loss": 1.7423, - "step": 14000 - }, - { - "epoch": 0.44832, - "grad_norm": 47.963966369628906, - "learning_rate": 1.724088888888889e-05, - "loss": 1.753, - "step": 14010 - }, - { - "epoch": 0.44864, - "grad_norm": 47.81916427612305, - "learning_rate": 1.7237333333333335e-05, - "loss": 1.7314, - "step": 14020 - }, - { - "epoch": 0.44896, - "grad_norm": 43.19534683227539, - "learning_rate": 1.723377777777778e-05, - "loss": 1.7115, - "step": 14030 - }, - { - "epoch": 0.44928, - "grad_norm": 44.247955322265625, - "learning_rate": 1.7230222222222224e-05, - "loss": 1.7209, - "step": 14040 - }, - { - "epoch": 0.4496, - "grad_norm": 45.006771087646484, - "learning_rate": 1.7226666666666665e-05, - "loss": 1.6751, - "step": 14050 - }, - { - "epoch": 0.44992, - "grad_norm": 45.61868667602539, - "learning_rate": 1.7223111111111113e-05, - "loss": 1.7024, - "step": 14060 - }, - { - "epoch": 0.45024, - "grad_norm": 43.44654083251953, - "learning_rate": 1.7219555555555555e-05, - "loss": 1.6982, - "step": 14070 - }, - { - "epoch": 0.45056, - "grad_norm": 45.38816833496094, - "learning_rate": 1.7216000000000003e-05, - "loss": 1.7487, - "step": 14080 - }, - { - "epoch": 0.45088, - "grad_norm": 46.881126403808594, - "learning_rate": 1.7212444444444444e-05, - "loss": 1.6988, - "step": 14090 - }, - { - "epoch": 0.4512, - "grad_norm": 45.429744720458984, - "learning_rate": 1.720888888888889e-05, - "loss": 1.7508, - "step": 14100 - }, - { - "epoch": 0.45152, - "grad_norm": 46.08954620361328, - "learning_rate": 1.7205333333333333e-05, - "loss": 1.7342, - "step": 14110 - }, - { - 
"epoch": 0.45184, - "grad_norm": 43.329837799072266, - "learning_rate": 1.720177777777778e-05, - "loss": 1.7219, - "step": 14120 - }, - { - "epoch": 0.45216, - "grad_norm": 45.99585723876953, - "learning_rate": 1.7198222222222222e-05, - "loss": 1.7176, - "step": 14130 - }, - { - "epoch": 0.45248, - "grad_norm": 43.80538558959961, - "learning_rate": 1.719466666666667e-05, - "loss": 1.7207, - "step": 14140 - }, - { - "epoch": 0.4528, - "grad_norm": 43.61793518066406, - "learning_rate": 1.719111111111111e-05, - "loss": 1.706, - "step": 14150 - }, - { - "epoch": 0.45312, - "grad_norm": 46.47148513793945, - "learning_rate": 1.718755555555556e-05, - "loss": 1.7146, - "step": 14160 - }, - { - "epoch": 0.45344, - "grad_norm": 48.728145599365234, - "learning_rate": 1.7184e-05, - "loss": 1.7175, - "step": 14170 - }, - { - "epoch": 0.45376, - "grad_norm": 43.15789031982422, - "learning_rate": 1.7180444444444445e-05, - "loss": 1.7148, - "step": 14180 - }, - { - "epoch": 0.45408, - "grad_norm": 46.423492431640625, - "learning_rate": 1.717688888888889e-05, - "loss": 1.725, - "step": 14190 - }, - { - "epoch": 0.4544, - "grad_norm": 46.454776763916016, - "learning_rate": 1.7173333333333334e-05, - "loss": 1.6916, - "step": 14200 - }, - { - "epoch": 0.45472, - "grad_norm": 48.568687438964844, - "learning_rate": 1.716977777777778e-05, - "loss": 1.7083, - "step": 14210 - }, - { - "epoch": 0.45504, - "grad_norm": 44.02393341064453, - "learning_rate": 1.7166222222222223e-05, - "loss": 1.7551, - "step": 14220 - }, - { - "epoch": 0.45536, - "grad_norm": 47.03176498413086, - "learning_rate": 1.7162666666666667e-05, - "loss": 1.7424, - "step": 14230 - }, - { - "epoch": 0.45568, - "grad_norm": 43.364501953125, - "learning_rate": 1.7159111111111112e-05, - "loss": 1.7158, - "step": 14240 - }, - { - "epoch": 0.456, - "grad_norm": 45.3157844543457, - "learning_rate": 1.7155555555555557e-05, - "loss": 1.7286, - "step": 14250 - }, - { - "epoch": 0.45632, - "grad_norm": 45.80545425415039, - "learning_rate": 1.7152e-05, - "loss": 1.7186, - "step": 14260 - }, - { - "epoch": 0.45664, - "grad_norm": 43.9619140625, - "learning_rate": 1.7148444444444446e-05, - "loss": 1.7541, - "step": 14270 - }, - { - "epoch": 0.45696, - "grad_norm": 45.12228012084961, - "learning_rate": 1.714488888888889e-05, - "loss": 1.7254, - "step": 14280 - }, - { - "epoch": 0.45728, - "grad_norm": 47.905296325683594, - "learning_rate": 1.7141333333333335e-05, - "loss": 1.7621, - "step": 14290 - }, - { - "epoch": 0.4576, - "grad_norm": 45.022823333740234, - "learning_rate": 1.713777777777778e-05, - "loss": 1.7174, - "step": 14300 - }, - { - "epoch": 0.45792, - "grad_norm": 43.46538162231445, - "learning_rate": 1.7134222222222224e-05, - "loss": 1.6973, - "step": 14310 - }, - { - "epoch": 0.45824, - "grad_norm": 46.357234954833984, - "learning_rate": 1.713066666666667e-05, - "loss": 1.731, - "step": 14320 - }, - { - "epoch": 0.45856, - "grad_norm": 45.371124267578125, - "learning_rate": 1.7127111111111113e-05, - "loss": 1.7153, - "step": 14330 - }, - { - "epoch": 0.45888, - "grad_norm": 44.28989028930664, - "learning_rate": 1.7123555555555558e-05, - "loss": 1.6944, - "step": 14340 - }, - { - "epoch": 0.4592, - "grad_norm": 45.44322967529297, - "learning_rate": 1.7120000000000002e-05, - "loss": 1.7328, - "step": 14350 - }, - { - "epoch": 0.45952, - "grad_norm": 45.2812614440918, - "learning_rate": 1.7116444444444447e-05, - "loss": 1.7483, - "step": 14360 - }, - { - "epoch": 0.45984, - "grad_norm": 44.76524353027344, - "learning_rate": 1.711288888888889e-05, - 
"loss": 1.7532, - "step": 14370 - }, - { - "epoch": 0.46016, - "grad_norm": 44.747642517089844, - "learning_rate": 1.7109333333333336e-05, - "loss": 1.7378, - "step": 14380 - }, - { - "epoch": 0.46048, - "grad_norm": 46.987571716308594, - "learning_rate": 1.710577777777778e-05, - "loss": 1.7158, - "step": 14390 - }, - { - "epoch": 0.4608, - "grad_norm": 43.8972053527832, - "learning_rate": 1.7102222222222225e-05, - "loss": 1.6894, - "step": 14400 - }, - { - "epoch": 0.46112, - "grad_norm": 44.63825225830078, - "learning_rate": 1.709866666666667e-05, - "loss": 1.7255, - "step": 14410 - }, - { - "epoch": 0.46144, - "grad_norm": 45.58122253417969, - "learning_rate": 1.7095111111111114e-05, - "loss": 1.7339, - "step": 14420 - }, - { - "epoch": 0.46176, - "grad_norm": 45.17179489135742, - "learning_rate": 1.7091555555555555e-05, - "loss": 1.729, - "step": 14430 - }, - { - "epoch": 0.46208, - "grad_norm": 44.413856506347656, - "learning_rate": 1.7088000000000003e-05, - "loss": 1.6972, - "step": 14440 - }, - { - "epoch": 0.4624, - "grad_norm": 46.393951416015625, - "learning_rate": 1.7084444444444444e-05, - "loss": 1.7067, - "step": 14450 - }, - { - "epoch": 0.46272, - "grad_norm": 44.711326599121094, - "learning_rate": 1.7080888888888892e-05, - "loss": 1.7007, - "step": 14460 - }, - { - "epoch": 0.46304, - "grad_norm": 45.364009857177734, - "learning_rate": 1.7077333333333333e-05, - "loss": 1.728, - "step": 14470 - }, - { - "epoch": 0.46336, - "grad_norm": 45.55656433105469, - "learning_rate": 1.707377777777778e-05, - "loss": 1.7087, - "step": 14480 - }, - { - "epoch": 0.46368, - "grad_norm": 44.69358444213867, - "learning_rate": 1.7070222222222222e-05, - "loss": 1.7276, - "step": 14490 - }, - { - "epoch": 0.464, - "grad_norm": 43.40841293334961, - "learning_rate": 1.706666666666667e-05, - "loss": 1.7234, - "step": 14500 - }, - { - "epoch": 0.46432, - "grad_norm": 46.0664176940918, - "learning_rate": 1.706311111111111e-05, - "loss": 1.7575, - "step": 14510 - }, - { - "epoch": 0.46464, - "grad_norm": 45.3603401184082, - "learning_rate": 1.7059555555555556e-05, - "loss": 1.7083, - "step": 14520 - }, - { - "epoch": 0.46496, - "grad_norm": 43.96553039550781, - "learning_rate": 1.7056e-05, - "loss": 1.7176, - "step": 14530 - }, - { - "epoch": 0.46528, - "grad_norm": 46.85286331176758, - "learning_rate": 1.7052444444444445e-05, - "loss": 1.7251, - "step": 14540 - }, - { - "epoch": 0.4656, - "grad_norm": 45.323936462402344, - "learning_rate": 1.704888888888889e-05, - "loss": 1.6989, - "step": 14550 - }, - { - "epoch": 0.46592, - "grad_norm": 46.17856979370117, - "learning_rate": 1.7045333333333334e-05, - "loss": 1.7122, - "step": 14560 - }, - { - "epoch": 0.46624, - "grad_norm": 45.23008728027344, - "learning_rate": 1.704177777777778e-05, - "loss": 1.7531, - "step": 14570 - }, - { - "epoch": 0.46656, - "grad_norm": 96.77664184570312, - "learning_rate": 1.7038222222222223e-05, - "loss": 1.7109, - "step": 14580 - }, - { - "epoch": 0.46688, - "grad_norm": 48.62037658691406, - "learning_rate": 1.7034666666666668e-05, - "loss": 1.7294, - "step": 14590 - }, - { - "epoch": 0.4672, - "grad_norm": 46.559295654296875, - "learning_rate": 1.7031111111111113e-05, - "loss": 1.7075, - "step": 14600 - }, - { - "epoch": 0.46752, - "grad_norm": 49.41647720336914, - "learning_rate": 1.7027555555555557e-05, - "loss": 1.7256, - "step": 14610 - }, - { - "epoch": 0.46784, - "grad_norm": 46.767372131347656, - "learning_rate": 1.7024e-05, - "loss": 1.7183, - "step": 14620 - }, - { - "epoch": 0.46816, - "grad_norm": 
46.65950012207031, - "learning_rate": 1.7020444444444446e-05, - "loss": 1.7412, - "step": 14630 - }, - { - "epoch": 0.46848, - "grad_norm": 46.2630615234375, - "learning_rate": 1.701688888888889e-05, - "loss": 1.7295, - "step": 14640 - }, - { - "epoch": 0.4688, - "grad_norm": 43.83213424682617, - "learning_rate": 1.7013333333333335e-05, - "loss": 1.7162, - "step": 14650 - }, - { - "epoch": 0.46912, - "grad_norm": 44.1187858581543, - "learning_rate": 1.700977777777778e-05, - "loss": 1.7099, - "step": 14660 - }, - { - "epoch": 0.46944, - "grad_norm": 46.37276077270508, - "learning_rate": 1.7006222222222224e-05, - "loss": 1.6949, - "step": 14670 - }, - { - "epoch": 0.46976, - "grad_norm": 44.964237213134766, - "learning_rate": 1.700266666666667e-05, - "loss": 1.7339, - "step": 14680 - }, - { - "epoch": 0.47008, - "grad_norm": 44.88374328613281, - "learning_rate": 1.6999111111111114e-05, - "loss": 1.7327, - "step": 14690 - }, - { - "epoch": 0.4704, - "grad_norm": 46.43785095214844, - "learning_rate": 1.6995555555555555e-05, - "loss": 1.7196, - "step": 14700 - }, - { - "epoch": 0.47072, - "grad_norm": 44.99215316772461, - "learning_rate": 1.6992000000000003e-05, - "loss": 1.7084, - "step": 14710 - }, - { - "epoch": 0.47104, - "grad_norm": 43.4759521484375, - "learning_rate": 1.6988444444444444e-05, - "loss": 1.6985, - "step": 14720 - }, - { - "epoch": 0.47136, - "grad_norm": 43.7542839050293, - "learning_rate": 1.6984888888888892e-05, - "loss": 1.7298, - "step": 14730 - }, - { - "epoch": 0.47168, - "grad_norm": 44.91163635253906, - "learning_rate": 1.6981333333333333e-05, - "loss": 1.7201, - "step": 14740 - }, - { - "epoch": 0.472, - "grad_norm": 49.6381950378418, - "learning_rate": 1.697777777777778e-05, - "loss": 1.7273, - "step": 14750 - }, - { - "epoch": 0.47232, - "grad_norm": 46.76472473144531, - "learning_rate": 1.6974222222222222e-05, - "loss": 1.7034, - "step": 14760 - }, - { - "epoch": 0.47264, - "grad_norm": 44.74431610107422, - "learning_rate": 1.697066666666667e-05, - "loss": 1.7266, - "step": 14770 - }, - { - "epoch": 0.47296, - "grad_norm": 45.28511428833008, - "learning_rate": 1.696711111111111e-05, - "loss": 1.7461, - "step": 14780 - }, - { - "epoch": 0.47328, - "grad_norm": 44.02510452270508, - "learning_rate": 1.696355555555556e-05, - "loss": 1.7089, - "step": 14790 - }, - { - "epoch": 0.4736, - "grad_norm": 45.76411437988281, - "learning_rate": 1.696e-05, - "loss": 1.7251, - "step": 14800 - }, - { - "epoch": 0.47392, - "grad_norm": 44.46986770629883, - "learning_rate": 1.6956444444444445e-05, - "loss": 1.6815, - "step": 14810 - }, - { - "epoch": 0.47424, - "grad_norm": 45.384033203125, - "learning_rate": 1.695288888888889e-05, - "loss": 1.7373, - "step": 14820 - }, - { - "epoch": 0.47456, - "grad_norm": 43.98371887207031, - "learning_rate": 1.6949333333333334e-05, - "loss": 1.7153, - "step": 14830 - }, - { - "epoch": 0.47488, - "grad_norm": 44.387054443359375, - "learning_rate": 1.694577777777778e-05, - "loss": 1.7194, - "step": 14840 - }, - { - "epoch": 0.4752, - "grad_norm": 44.254051208496094, - "learning_rate": 1.6942222222222223e-05, - "loss": 1.6953, - "step": 14850 - }, - { - "epoch": 0.47552, - "grad_norm": 47.075626373291016, - "learning_rate": 1.6938666666666668e-05, - "loss": 1.7409, - "step": 14860 - }, - { - "epoch": 0.47584, - "grad_norm": 48.30929946899414, - "learning_rate": 1.6935111111111112e-05, - "loss": 1.7164, - "step": 14870 - }, - { - "epoch": 0.47616, - "grad_norm": 45.149024963378906, - "learning_rate": 1.6931555555555557e-05, - "loss": 1.6862, - 
"step": 14880 - }, - { - "epoch": 0.47648, - "grad_norm": 45.58299255371094, - "learning_rate": 1.6928e-05, - "loss": 1.6864, - "step": 14890 - }, - { - "epoch": 0.4768, - "grad_norm": 44.711151123046875, - "learning_rate": 1.6924444444444446e-05, - "loss": 1.7183, - "step": 14900 - }, - { - "epoch": 0.47712, - "grad_norm": 47.265140533447266, - "learning_rate": 1.692088888888889e-05, - "loss": 1.7222, - "step": 14910 - }, - { - "epoch": 0.47744, - "grad_norm": 45.18415069580078, - "learning_rate": 1.6917333333333335e-05, - "loss": 1.704, - "step": 14920 - }, - { - "epoch": 0.47776, - "grad_norm": 45.706817626953125, - "learning_rate": 1.691377777777778e-05, - "loss": 1.6791, - "step": 14930 - }, - { - "epoch": 0.47808, - "grad_norm": 46.975589752197266, - "learning_rate": 1.6910222222222224e-05, - "loss": 1.7106, - "step": 14940 - }, - { - "epoch": 0.4784, - "grad_norm": 42.881412506103516, - "learning_rate": 1.690666666666667e-05, - "loss": 1.7162, - "step": 14950 - }, - { - "epoch": 0.47872, - "grad_norm": 45.331024169921875, - "learning_rate": 1.6903111111111113e-05, - "loss": 1.7089, - "step": 14960 - }, - { - "epoch": 0.47904, - "grad_norm": 45.485511779785156, - "learning_rate": 1.6899555555555558e-05, - "loss": 1.6941, - "step": 14970 - }, - { - "epoch": 0.47936, - "grad_norm": 44.187564849853516, - "learning_rate": 1.6896000000000002e-05, - "loss": 1.7062, - "step": 14980 - }, - { - "epoch": 0.47968, - "grad_norm": 44.031944274902344, - "learning_rate": 1.6892444444444447e-05, - "loss": 1.6959, - "step": 14990 - }, - { - "epoch": 0.48, - "grad_norm": 44.38731384277344, - "learning_rate": 1.688888888888889e-05, - "loss": 1.7016, - "step": 15000 - }, - { - "epoch": 0.48032, - "grad_norm": 43.112335205078125, - "learning_rate": 1.6885333333333336e-05, - "loss": 1.6804, - "step": 15010 - }, - { - "epoch": 0.48064, - "grad_norm": 47.42929458618164, - "learning_rate": 1.688177777777778e-05, - "loss": 1.7187, - "step": 15020 - }, - { - "epoch": 0.48096, - "grad_norm": 46.1917610168457, - "learning_rate": 1.687822222222222e-05, - "loss": 1.728, - "step": 15030 - }, - { - "epoch": 0.48128, - "grad_norm": 47.63662338256836, - "learning_rate": 1.687466666666667e-05, - "loss": 1.7058, - "step": 15040 - }, - { - "epoch": 0.4816, - "grad_norm": 45.0674934387207, - "learning_rate": 1.687111111111111e-05, - "loss": 1.7012, - "step": 15050 - }, - { - "epoch": 0.48192, - "grad_norm": 43.3830451965332, - "learning_rate": 1.686755555555556e-05, - "loss": 1.7319, - "step": 15060 - }, - { - "epoch": 0.48224, - "grad_norm": 48.20471954345703, - "learning_rate": 1.6864e-05, - "loss": 1.7366, - "step": 15070 - }, - { - "epoch": 0.48256, - "grad_norm": 47.284584045410156, - "learning_rate": 1.6860444444444444e-05, - "loss": 1.7092, - "step": 15080 - }, - { - "epoch": 0.48288, - "grad_norm": 45.17268753051758, - "learning_rate": 1.685688888888889e-05, - "loss": 1.7328, - "step": 15090 - }, - { - "epoch": 0.4832, - "grad_norm": 45.82081985473633, - "learning_rate": 1.6853333333333333e-05, - "loss": 1.7183, - "step": 15100 - }, - { - "epoch": 0.48352, - "grad_norm": 46.140403747558594, - "learning_rate": 1.6849777777777778e-05, - "loss": 1.7129, - "step": 15110 - }, - { - "epoch": 0.48384, - "grad_norm": 44.811729431152344, - "learning_rate": 1.6846222222222223e-05, - "loss": 1.6577, - "step": 15120 - }, - { - "epoch": 0.48416, - "grad_norm": 44.09989547729492, - "learning_rate": 1.6842666666666667e-05, - "loss": 1.7063, - "step": 15130 - }, - { - "epoch": 0.48448, - "grad_norm": 45.67482376098633, - 
"learning_rate": 1.683911111111111e-05, - "loss": 1.7513, - "step": 15140 - }, - { - "epoch": 0.4848, - "grad_norm": 44.314327239990234, - "learning_rate": 1.6835555555555556e-05, - "loss": 1.7474, - "step": 15150 - }, - { - "epoch": 0.48512, - "grad_norm": 45.371646881103516, - "learning_rate": 1.6832e-05, - "loss": 1.6844, - "step": 15160 - }, - { - "epoch": 0.48544, - "grad_norm": 44.82754135131836, - "learning_rate": 1.6828444444444445e-05, - "loss": 1.7245, - "step": 15170 - }, - { - "epoch": 0.48576, - "grad_norm": 43.516319274902344, - "learning_rate": 1.682488888888889e-05, - "loss": 1.713, - "step": 15180 - }, - { - "epoch": 0.48608, - "grad_norm": 44.03482437133789, - "learning_rate": 1.6821333333333334e-05, - "loss": 1.6786, - "step": 15190 - }, - { - "epoch": 0.4864, - "grad_norm": 43.02853012084961, - "learning_rate": 1.681777777777778e-05, - "loss": 1.7134, - "step": 15200 - }, - { - "epoch": 0.48672, - "grad_norm": 43.97295379638672, - "learning_rate": 1.6814222222222224e-05, - "loss": 1.7292, - "step": 15210 - }, - { - "epoch": 0.48704, - "grad_norm": 43.18336868286133, - "learning_rate": 1.6810666666666668e-05, - "loss": 1.7036, - "step": 15220 - }, - { - "epoch": 0.48736, - "grad_norm": 45.938255310058594, - "learning_rate": 1.6807111111111113e-05, - "loss": 1.6985, - "step": 15230 - }, - { - "epoch": 0.48768, - "grad_norm": 45.43522262573242, - "learning_rate": 1.6803555555555557e-05, - "loss": 1.6988, - "step": 15240 - }, - { - "epoch": 0.488, - "grad_norm": 44.32875442504883, - "learning_rate": 1.6800000000000002e-05, - "loss": 1.7163, - "step": 15250 - }, - { - "epoch": 0.48832, - "grad_norm": 45.96543502807617, - "learning_rate": 1.6796444444444446e-05, - "loss": 1.7364, - "step": 15260 - }, - { - "epoch": 0.48864, - "grad_norm": 44.921085357666016, - "learning_rate": 1.679288888888889e-05, - "loss": 1.6942, - "step": 15270 - }, - { - "epoch": 0.48896, - "grad_norm": 46.332210540771484, - "learning_rate": 1.6789333333333335e-05, - "loss": 1.7332, - "step": 15280 - }, - { - "epoch": 0.48928, - "grad_norm": 45.14290237426758, - "learning_rate": 1.678577777777778e-05, - "loss": 1.7133, - "step": 15290 - }, - { - "epoch": 0.4896, - "grad_norm": 43.08831787109375, - "learning_rate": 1.6782222222222225e-05, - "loss": 1.763, - "step": 15300 - }, - { - "epoch": 0.48992, - "grad_norm": 44.531578063964844, - "learning_rate": 1.677866666666667e-05, - "loss": 1.7257, - "step": 15310 - }, - { - "epoch": 0.49024, - "grad_norm": 42.90713119506836, - "learning_rate": 1.6775111111111114e-05, - "loss": 1.7057, - "step": 15320 - }, - { - "epoch": 0.49056, - "grad_norm": 44.45404052734375, - "learning_rate": 1.6771555555555555e-05, - "loss": 1.6935, - "step": 15330 - }, - { - "epoch": 0.49088, - "grad_norm": 43.76084899902344, - "learning_rate": 1.6768000000000003e-05, - "loss": 1.7232, - "step": 15340 - }, - { - "epoch": 0.4912, - "grad_norm": 44.55863952636719, - "learning_rate": 1.6764444444444444e-05, - "loss": 1.6916, - "step": 15350 - }, - { - "epoch": 0.49152, - "grad_norm": 43.15925979614258, - "learning_rate": 1.6760888888888892e-05, - "loss": 1.7028, - "step": 15360 - }, - { - "epoch": 0.49184, - "grad_norm": 45.47795104980469, - "learning_rate": 1.6757333333333333e-05, - "loss": 1.6957, - "step": 15370 - }, - { - "epoch": 0.49216, - "grad_norm": 45.608734130859375, - "learning_rate": 1.675377777777778e-05, - "loss": 1.6955, - "step": 15380 - }, - { - "epoch": 0.49248, - "grad_norm": 43.81676483154297, - "learning_rate": 1.6750222222222222e-05, - "loss": 1.7257, - "step": 
15390 - }, - { - "epoch": 0.4928, - "grad_norm": 45.095298767089844, - "learning_rate": 1.674666666666667e-05, - "loss": 1.7381, - "step": 15400 - }, - { - "epoch": 0.49312, - "grad_norm": 43.94649887084961, - "learning_rate": 1.674311111111111e-05, - "loss": 1.6942, - "step": 15410 - }, - { - "epoch": 0.49344, - "grad_norm": 45.50575256347656, - "learning_rate": 1.673955555555556e-05, - "loss": 1.6912, - "step": 15420 - }, - { - "epoch": 0.49376, - "grad_norm": 48.223209381103516, - "learning_rate": 1.6736e-05, - "loss": 1.7267, - "step": 15430 - }, - { - "epoch": 0.49408, - "grad_norm": 44.392372131347656, - "learning_rate": 1.6732444444444448e-05, - "loss": 1.7077, - "step": 15440 - }, - { - "epoch": 0.4944, - "grad_norm": 44.331565856933594, - "learning_rate": 1.672888888888889e-05, - "loss": 1.7118, - "step": 15450 - }, - { - "epoch": 0.49472, - "grad_norm": 44.65738296508789, - "learning_rate": 1.6725333333333334e-05, - "loss": 1.6891, - "step": 15460 - }, - { - "epoch": 0.49504, - "grad_norm": 43.0691032409668, - "learning_rate": 1.672177777777778e-05, - "loss": 1.7015, - "step": 15470 - }, - { - "epoch": 0.49536, - "grad_norm": 44.851234436035156, - "learning_rate": 1.6718222222222223e-05, - "loss": 1.7186, - "step": 15480 - }, - { - "epoch": 0.49568, - "grad_norm": 44.26792907714844, - "learning_rate": 1.6714666666666668e-05, - "loss": 1.6874, - "step": 15490 - }, - { - "epoch": 0.496, - "grad_norm": 45.67174530029297, - "learning_rate": 1.6711111111111112e-05, - "loss": 1.6646, - "step": 15500 - }, - { - "epoch": 0.49632, - "grad_norm": 46.26319885253906, - "learning_rate": 1.6707555555555557e-05, - "loss": 1.7374, - "step": 15510 - }, - { - "epoch": 0.49664, - "grad_norm": 45.9160041809082, - "learning_rate": 1.6704e-05, - "loss": 1.6884, - "step": 15520 - }, - { - "epoch": 0.49696, - "grad_norm": 43.73548126220703, - "learning_rate": 1.6700444444444446e-05, - "loss": 1.704, - "step": 15530 - }, - { - "epoch": 0.49728, - "grad_norm": 46.672542572021484, - "learning_rate": 1.669688888888889e-05, - "loss": 1.7175, - "step": 15540 - }, - { - "epoch": 0.4976, - "grad_norm": 43.42729949951172, - "learning_rate": 1.6693333333333335e-05, - "loss": 1.7285, - "step": 15550 - }, - { - "epoch": 0.49792, - "grad_norm": 43.43124008178711, - "learning_rate": 1.668977777777778e-05, - "loss": 1.7281, - "step": 15560 - }, - { - "epoch": 0.49824, - "grad_norm": 43.26439666748047, - "learning_rate": 1.6686222222222224e-05, - "loss": 1.7142, - "step": 15570 - }, - { - "epoch": 0.49856, - "grad_norm": 46.92955780029297, - "learning_rate": 1.668266666666667e-05, - "loss": 1.7141, - "step": 15580 - }, - { - "epoch": 0.49888, - "grad_norm": 45.263893127441406, - "learning_rate": 1.6679111111111113e-05, - "loss": 1.7077, - "step": 15590 - }, - { - "epoch": 0.4992, - "grad_norm": 45.84095001220703, - "learning_rate": 1.6675555555555554e-05, - "loss": 1.726, - "step": 15600 - }, - { - "epoch": 0.49952, - "grad_norm": 44.74760055541992, - "learning_rate": 1.6672000000000002e-05, - "loss": 1.6628, - "step": 15610 - }, - { - "epoch": 0.49984, - "grad_norm": 45.276981353759766, - "learning_rate": 1.6668444444444443e-05, - "loss": 1.7268, - "step": 15620 - }, - { - "epoch": 0.50016, - "grad_norm": 44.45388412475586, - "learning_rate": 1.666488888888889e-05, - "loss": 1.7051, - "step": 15630 - }, - { - "epoch": 0.50048, - "grad_norm": 43.3873291015625, - "learning_rate": 1.6661333333333333e-05, - "loss": 1.66, - "step": 15640 - }, - { - "epoch": 0.5008, - "grad_norm": 45.358951568603516, - "learning_rate": 
1.665777777777778e-05, - "loss": 1.7032, - "step": 15650 - }, - { - "epoch": 0.50112, - "grad_norm": 45.18826675415039, - "learning_rate": 1.6654222222222222e-05, - "loss": 1.7122, - "step": 15660 - }, - { - "epoch": 0.50144, - "grad_norm": 47.63956832885742, - "learning_rate": 1.665066666666667e-05, - "loss": 1.7176, - "step": 15670 - }, - { - "epoch": 0.50176, - "grad_norm": 45.27025604248047, - "learning_rate": 1.664711111111111e-05, - "loss": 1.6904, - "step": 15680 - }, - { - "epoch": 0.50208, - "grad_norm": 44.5028190612793, - "learning_rate": 1.664355555555556e-05, - "loss": 1.6702, - "step": 15690 - }, - { - "epoch": 0.5024, - "grad_norm": 43.3703727722168, - "learning_rate": 1.664e-05, - "loss": 1.7137, - "step": 15700 - }, - { - "epoch": 0.50272, - "grad_norm": 43.58053207397461, - "learning_rate": 1.6636444444444448e-05, - "loss": 1.7239, - "step": 15710 - }, - { - "epoch": 0.50304, - "grad_norm": 44.74704360961914, - "learning_rate": 1.663288888888889e-05, - "loss": 1.6596, - "step": 15720 - }, - { - "epoch": 0.50336, - "grad_norm": 43.05439376831055, - "learning_rate": 1.6629333333333334e-05, - "loss": 1.6869, - "step": 15730 - }, - { - "epoch": 0.50368, - "grad_norm": 45.10047149658203, - "learning_rate": 1.6625777777777778e-05, - "loss": 1.7405, - "step": 15740 - }, - { - "epoch": 0.504, - "grad_norm": 44.82457733154297, - "learning_rate": 1.6622222222222223e-05, - "loss": 1.6951, - "step": 15750 - }, - { - "epoch": 0.50432, - "grad_norm": 46.221214294433594, - "learning_rate": 1.6618666666666667e-05, - "loss": 1.721, - "step": 15760 - }, - { - "epoch": 0.50464, - "grad_norm": 41.99622344970703, - "learning_rate": 1.6615111111111112e-05, - "loss": 1.6501, - "step": 15770 - }, - { - "epoch": 0.50496, - "grad_norm": 44.5416145324707, - "learning_rate": 1.6611555555555556e-05, - "loss": 1.6827, - "step": 15780 - }, - { - "epoch": 0.50528, - "grad_norm": 43.418243408203125, - "learning_rate": 1.6608e-05, - "loss": 1.7221, - "step": 15790 - }, - { - "epoch": 0.5056, - "grad_norm": 44.96596908569336, - "learning_rate": 1.6604444444444445e-05, - "loss": 1.6984, - "step": 15800 - }, - { - "epoch": 0.50592, - "grad_norm": 47.30256652832031, - "learning_rate": 1.660088888888889e-05, - "loss": 1.7409, - "step": 15810 - }, - { - "epoch": 0.50624, - "grad_norm": 45.20161819458008, - "learning_rate": 1.6597333333333335e-05, - "loss": 1.7211, - "step": 15820 - }, - { - "epoch": 0.50656, - "grad_norm": 45.83244705200195, - "learning_rate": 1.659377777777778e-05, - "loss": 1.6697, - "step": 15830 - }, - { - "epoch": 0.50688, - "grad_norm": 46.3140754699707, - "learning_rate": 1.6590222222222224e-05, - "loss": 1.7022, - "step": 15840 - }, - { - "epoch": 0.5072, - "grad_norm": 45.87006378173828, - "learning_rate": 1.6586666666666668e-05, - "loss": 1.722, - "step": 15850 - }, - { - "epoch": 0.50752, - "grad_norm": 45.3338508605957, - "learning_rate": 1.6583111111111113e-05, - "loss": 1.7095, - "step": 15860 - }, - { - "epoch": 0.50784, - "grad_norm": 44.24995422363281, - "learning_rate": 1.6579555555555557e-05, - "loss": 1.7225, - "step": 15870 - }, - { - "epoch": 0.50816, - "grad_norm": 45.53012466430664, - "learning_rate": 1.6576000000000002e-05, - "loss": 1.6879, - "step": 15880 - }, - { - "epoch": 0.50848, - "grad_norm": 46.71226119995117, - "learning_rate": 1.6572444444444446e-05, - "loss": 1.6784, - "step": 15890 - }, - { - "epoch": 0.5088, - "grad_norm": 44.13043212890625, - "learning_rate": 1.656888888888889e-05, - "loss": 1.6958, - "step": 15900 - }, - { - "epoch": 0.50912, - 
"grad_norm": 44.21406555175781, - "learning_rate": 1.6565333333333336e-05, - "loss": 1.6638, - "step": 15910 - }, - { - "epoch": 0.50944, - "grad_norm": 43.99684143066406, - "learning_rate": 1.656177777777778e-05, - "loss": 1.7091, - "step": 15920 - }, - { - "epoch": 0.50976, - "grad_norm": 44.244503021240234, - "learning_rate": 1.6558222222222225e-05, - "loss": 1.7032, - "step": 15930 - }, - { - "epoch": 0.51008, - "grad_norm": 45.38497543334961, - "learning_rate": 1.655466666666667e-05, - "loss": 1.6894, - "step": 15940 - }, - { - "epoch": 0.5104, - "grad_norm": 44.65730667114258, - "learning_rate": 1.6551111111111114e-05, - "loss": 1.7331, - "step": 15950 - }, - { - "epoch": 0.51072, - "grad_norm": 44.94163513183594, - "learning_rate": 1.654755555555556e-05, - "loss": 1.7253, - "step": 15960 - }, - { - "epoch": 0.51104, - "grad_norm": 43.73176193237305, - "learning_rate": 1.6544000000000003e-05, - "loss": 1.7087, - "step": 15970 - }, - { - "epoch": 0.51136, - "grad_norm": 45.248966217041016, - "learning_rate": 1.6540444444444444e-05, - "loss": 1.7013, - "step": 15980 - }, - { - "epoch": 0.51168, - "grad_norm": 43.847721099853516, - "learning_rate": 1.6536888888888892e-05, - "loss": 1.7007, - "step": 15990 - }, - { - "epoch": 0.512, - "grad_norm": 43.88396453857422, - "learning_rate": 1.6533333333333333e-05, - "loss": 1.6882, - "step": 16000 - }, - { - "epoch": 0.51232, - "grad_norm": 43.48516845703125, - "learning_rate": 1.652977777777778e-05, - "loss": 1.7187, - "step": 16010 - }, - { - "epoch": 0.51264, - "grad_norm": 44.9348030090332, - "learning_rate": 1.6526222222222222e-05, - "loss": 1.6971, - "step": 16020 - }, - { - "epoch": 0.51296, - "grad_norm": 48.24411392211914, - "learning_rate": 1.652266666666667e-05, - "loss": 1.6926, - "step": 16030 - }, - { - "epoch": 0.51328, - "grad_norm": 45.70420455932617, - "learning_rate": 1.651911111111111e-05, - "loss": 1.7344, - "step": 16040 - }, - { - "epoch": 0.5136, - "grad_norm": 43.00725555419922, - "learning_rate": 1.651555555555556e-05, - "loss": 1.702, - "step": 16050 - }, - { - "epoch": 0.51392, - "grad_norm": 43.134979248046875, - "learning_rate": 1.6512e-05, - "loss": 1.7159, - "step": 16060 - }, - { - "epoch": 0.51424, - "grad_norm": 43.83757781982422, - "learning_rate": 1.6508444444444445e-05, - "loss": 1.6921, - "step": 16070 - }, - { - "epoch": 0.51456, - "grad_norm": 44.838863372802734, - "learning_rate": 1.650488888888889e-05, - "loss": 1.7255, - "step": 16080 - }, - { - "epoch": 0.51488, - "grad_norm": 46.207984924316406, - "learning_rate": 1.6501333333333334e-05, - "loss": 1.697, - "step": 16090 - }, - { - "epoch": 0.5152, - "grad_norm": 42.93474578857422, - "learning_rate": 1.649777777777778e-05, - "loss": 1.7225, - "step": 16100 - }, - { - "epoch": 0.51552, - "grad_norm": 44.5800895690918, - "learning_rate": 1.6494222222222223e-05, - "loss": 1.7124, - "step": 16110 - }, - { - "epoch": 0.51584, - "grad_norm": 45.92104721069336, - "learning_rate": 1.6490666666666668e-05, - "loss": 1.718, - "step": 16120 - }, - { - "epoch": 0.51616, - "grad_norm": 45.22180938720703, - "learning_rate": 1.6487111111111112e-05, - "loss": 1.7042, - "step": 16130 - }, - { - "epoch": 0.51648, - "grad_norm": 45.1376838684082, - "learning_rate": 1.6483555555555557e-05, - "loss": 1.6804, - "step": 16140 - }, - { - "epoch": 0.5168, - "grad_norm": 44.4555778503418, - "learning_rate": 1.648e-05, - "loss": 1.7167, - "step": 16150 - }, - { - "epoch": 0.51712, - "grad_norm": 46.41279602050781, - "learning_rate": 1.6476444444444446e-05, - "loss": 1.7113, - 
"step": 16160 - }, - { - "epoch": 0.51744, - "grad_norm": 45.017879486083984, - "learning_rate": 1.647288888888889e-05, - "loss": 1.7323, - "step": 16170 - }, - { - "epoch": 0.51776, - "grad_norm": 44.85575485229492, - "learning_rate": 1.6469333333333335e-05, - "loss": 1.7163, - "step": 16180 - }, - { - "epoch": 0.51808, - "grad_norm": 43.125179290771484, - "learning_rate": 1.646577777777778e-05, - "loss": 1.7103, - "step": 16190 - }, - { - "epoch": 0.5184, - "grad_norm": 47.32752227783203, - "learning_rate": 1.6462222222222224e-05, - "loss": 1.7208, - "step": 16200 - }, - { - "epoch": 0.51872, - "grad_norm": 44.214813232421875, - "learning_rate": 1.645866666666667e-05, - "loss": 1.6869, - "step": 16210 - }, - { - "epoch": 0.51904, - "grad_norm": 43.823272705078125, - "learning_rate": 1.6455111111111113e-05, - "loss": 1.7107, - "step": 16220 - }, - { - "epoch": 0.51936, - "grad_norm": 44.63423538208008, - "learning_rate": 1.6451555555555554e-05, - "loss": 1.665, - "step": 16230 - }, - { - "epoch": 0.51968, - "grad_norm": 43.06886672973633, - "learning_rate": 1.6448000000000002e-05, - "loss": 1.7051, - "step": 16240 - }, - { - "epoch": 0.52, - "grad_norm": 43.636932373046875, - "learning_rate": 1.6444444444444444e-05, - "loss": 1.7217, - "step": 16250 - }, - { - "epoch": 0.52032, - "grad_norm": 44.1069221496582, - "learning_rate": 1.644088888888889e-05, - "loss": 1.7063, - "step": 16260 - }, - { - "epoch": 0.52064, - "grad_norm": 46.94747543334961, - "learning_rate": 1.6437333333333333e-05, - "loss": 1.699, - "step": 16270 - }, - { - "epoch": 0.52096, - "grad_norm": 45.03914260864258, - "learning_rate": 1.643377777777778e-05, - "loss": 1.7257, - "step": 16280 - }, - { - "epoch": 0.52128, - "grad_norm": 45.86468505859375, - "learning_rate": 1.6430222222222222e-05, - "loss": 1.722, - "step": 16290 - }, - { - "epoch": 0.5216, - "grad_norm": 43.51268005371094, - "learning_rate": 1.642666666666667e-05, - "loss": 1.676, - "step": 16300 - }, - { - "epoch": 0.52192, - "grad_norm": 42.855106353759766, - "learning_rate": 1.642311111111111e-05, - "loss": 1.6782, - "step": 16310 - }, - { - "epoch": 0.52224, - "grad_norm": 48.07331466674805, - "learning_rate": 1.641955555555556e-05, - "loss": 1.7173, - "step": 16320 - }, - { - "epoch": 0.52256, - "grad_norm": 43.21236038208008, - "learning_rate": 1.6416e-05, - "loss": 1.7336, - "step": 16330 - }, - { - "epoch": 0.52288, - "grad_norm": 44.75568771362305, - "learning_rate": 1.6412444444444448e-05, - "loss": 1.6918, - "step": 16340 - }, - { - "epoch": 0.5232, - "grad_norm": 44.176422119140625, - "learning_rate": 1.640888888888889e-05, - "loss": 1.7002, - "step": 16350 - }, - { - "epoch": 0.52352, - "grad_norm": 44.09846115112305, - "learning_rate": 1.6405333333333334e-05, - "loss": 1.6942, - "step": 16360 - }, - { - "epoch": 0.52384, - "grad_norm": 42.87156295776367, - "learning_rate": 1.6401777777777778e-05, - "loss": 1.7208, - "step": 16370 - }, - { - "epoch": 0.52416, - "grad_norm": 46.28589630126953, - "learning_rate": 1.6398222222222223e-05, - "loss": 1.6768, - "step": 16380 - }, - { - "epoch": 0.52448, - "grad_norm": 45.45729064941406, - "learning_rate": 1.6394666666666667e-05, - "loss": 1.6837, - "step": 16390 - }, - { - "epoch": 0.5248, - "grad_norm": 45.6191520690918, - "learning_rate": 1.6391111111111112e-05, - "loss": 1.7055, - "step": 16400 - }, - { - "epoch": 0.52512, - "grad_norm": 45.490787506103516, - "learning_rate": 1.6387555555555556e-05, - "loss": 1.7033, - "step": 16410 - }, - { - "epoch": 0.52544, - "grad_norm": 44.59261703491211, - 
"learning_rate": 1.6384e-05, - "loss": 1.7125, - "step": 16420 - }, - { - "epoch": 0.52576, - "grad_norm": 45.315059661865234, - "learning_rate": 1.6380444444444446e-05, - "loss": 1.7056, - "step": 16430 - }, - { - "epoch": 0.52608, - "grad_norm": 45.0549430847168, - "learning_rate": 1.637688888888889e-05, - "loss": 1.7085, - "step": 16440 - }, - { - "epoch": 0.5264, - "grad_norm": 44.14567565917969, - "learning_rate": 1.6373333333333335e-05, - "loss": 1.6874, - "step": 16450 - }, - { - "epoch": 0.52672, - "grad_norm": 45.77822494506836, - "learning_rate": 1.636977777777778e-05, - "loss": 1.7104, - "step": 16460 - }, - { - "epoch": 0.52704, - "grad_norm": 46.14954376220703, - "learning_rate": 1.6366222222222224e-05, - "loss": 1.6697, - "step": 16470 - }, - { - "epoch": 0.52736, - "grad_norm": 45.201969146728516, - "learning_rate": 1.636266666666667e-05, - "loss": 1.7316, - "step": 16480 - }, - { - "epoch": 0.52768, - "grad_norm": 47.41619110107422, - "learning_rate": 1.6359111111111113e-05, - "loss": 1.6999, - "step": 16490 - }, - { - "epoch": 0.528, - "grad_norm": 43.96504211425781, - "learning_rate": 1.6355555555555557e-05, - "loss": 1.7277, - "step": 16500 - }, - { - "epoch": 0.52832, - "grad_norm": 45.96023178100586, - "learning_rate": 1.6352000000000002e-05, - "loss": 1.703, - "step": 16510 - }, - { - "epoch": 0.52864, - "grad_norm": 46.28660583496094, - "learning_rate": 1.6348444444444447e-05, - "loss": 1.6972, - "step": 16520 - }, - { - "epoch": 0.52896, - "grad_norm": 43.34731674194336, - "learning_rate": 1.634488888888889e-05, - "loss": 1.6852, - "step": 16530 - }, - { - "epoch": 0.52928, - "grad_norm": 45.01405334472656, - "learning_rate": 1.6341333333333336e-05, - "loss": 1.6812, - "step": 16540 - }, - { - "epoch": 0.5296, - "grad_norm": 44.526119232177734, - "learning_rate": 1.633777777777778e-05, - "loss": 1.7143, - "step": 16550 - }, - { - "epoch": 0.52992, - "grad_norm": 45.97877883911133, - "learning_rate": 1.6334222222222225e-05, - "loss": 1.7182, - "step": 16560 - }, - { - "epoch": 0.53024, - "grad_norm": 47.06319046020508, - "learning_rate": 1.633066666666667e-05, - "loss": 1.704, - "step": 16570 - }, - { - "epoch": 0.53056, - "grad_norm": 44.56294250488281, - "learning_rate": 1.632711111111111e-05, - "loss": 1.7028, - "step": 16580 - }, - { - "epoch": 0.53088, - "grad_norm": 43.73545837402344, - "learning_rate": 1.632355555555556e-05, - "loss": 1.6899, - "step": 16590 - }, - { - "epoch": 0.5312, - "grad_norm": 45.30746841430664, - "learning_rate": 1.632e-05, - "loss": 1.687, - "step": 16600 - }, - { - "epoch": 0.53152, - "grad_norm": 44.87981414794922, - "learning_rate": 1.6316444444444448e-05, - "loss": 1.6603, - "step": 16610 - }, - { - "epoch": 0.53184, - "grad_norm": 44.816471099853516, - "learning_rate": 1.631288888888889e-05, - "loss": 1.7085, - "step": 16620 - }, - { - "epoch": 0.53216, - "grad_norm": 44.822017669677734, - "learning_rate": 1.6309333333333333e-05, - "loss": 1.717, - "step": 16630 - }, - { - "epoch": 0.53248, - "grad_norm": 46.93596649169922, - "learning_rate": 1.6305777777777778e-05, - "loss": 1.6723, - "step": 16640 - }, - { - "epoch": 0.5328, - "grad_norm": 45.41704177856445, - "learning_rate": 1.6302222222222222e-05, - "loss": 1.7343, - "step": 16650 - }, - { - "epoch": 0.53312, - "grad_norm": 45.636146545410156, - "learning_rate": 1.6298666666666667e-05, - "loss": 1.698, - "step": 16660 - }, - { - "epoch": 0.53344, - "grad_norm": 43.23383712768555, - "learning_rate": 1.629511111111111e-05, - "loss": 1.6966, - "step": 16670 - }, - { - "epoch": 
0.53376, - "grad_norm": 42.99844741821289, - "learning_rate": 1.6291555555555556e-05, - "loss": 1.7189, - "step": 16680 - }, - { - "epoch": 0.53408, - "grad_norm": 42.8013801574707, - "learning_rate": 1.6288e-05, - "loss": 1.7105, - "step": 16690 - }, - { - "epoch": 0.5344, - "grad_norm": 44.297874450683594, - "learning_rate": 1.6284444444444445e-05, - "loss": 1.6963, - "step": 16700 - }, - { - "epoch": 0.53472, - "grad_norm": 45.40163803100586, - "learning_rate": 1.628088888888889e-05, - "loss": 1.7061, - "step": 16710 - }, - { - "epoch": 0.53504, - "grad_norm": 44.55815887451172, - "learning_rate": 1.6277333333333334e-05, - "loss": 1.686, - "step": 16720 - }, - { - "epoch": 0.53536, - "grad_norm": 47.977874755859375, - "learning_rate": 1.627377777777778e-05, - "loss": 1.7157, - "step": 16730 - }, - { - "epoch": 0.53568, - "grad_norm": 44.50165557861328, - "learning_rate": 1.6270222222222223e-05, - "loss": 1.6814, - "step": 16740 - }, - { - "epoch": 0.536, - "grad_norm": 42.11151885986328, - "learning_rate": 1.6266666666666668e-05, - "loss": 1.7029, - "step": 16750 - }, - { - "epoch": 0.53632, - "grad_norm": 48.29054641723633, - "learning_rate": 1.6263111111111112e-05, - "loss": 1.6978, - "step": 16760 - }, - { - "epoch": 0.53664, - "grad_norm": 44.33515930175781, - "learning_rate": 1.6259555555555557e-05, - "loss": 1.6746, - "step": 16770 - }, - { - "epoch": 0.53696, - "grad_norm": 49.40854263305664, - "learning_rate": 1.6256e-05, - "loss": 1.7046, - "step": 16780 - }, - { - "epoch": 0.53728, - "grad_norm": 44.964656829833984, - "learning_rate": 1.6252444444444446e-05, - "loss": 1.7062, - "step": 16790 - }, - { - "epoch": 0.5376, - "grad_norm": 45.332523345947266, - "learning_rate": 1.624888888888889e-05, - "loss": 1.7132, - "step": 16800 - }, - { - "epoch": 0.53792, - "grad_norm": 44.173912048339844, - "learning_rate": 1.6245333333333335e-05, - "loss": 1.7039, - "step": 16810 - }, - { - "epoch": 0.53824, - "grad_norm": 45.531803131103516, - "learning_rate": 1.624177777777778e-05, - "loss": 1.6931, - "step": 16820 - }, - { - "epoch": 0.53856, - "grad_norm": 45.48138427734375, - "learning_rate": 1.6238222222222224e-05, - "loss": 1.7275, - "step": 16830 - }, - { - "epoch": 0.53888, - "grad_norm": 47.76054763793945, - "learning_rate": 1.623466666666667e-05, - "loss": 1.6815, - "step": 16840 - }, - { - "epoch": 0.5392, - "grad_norm": 44.69922637939453, - "learning_rate": 1.6231111111111113e-05, - "loss": 1.6909, - "step": 16850 - }, - { - "epoch": 0.53952, - "grad_norm": 44.52599334716797, - "learning_rate": 1.6227555555555558e-05, - "loss": 1.6969, - "step": 16860 - }, - { - "epoch": 0.53984, - "grad_norm": 45.41663360595703, - "learning_rate": 1.6224000000000003e-05, - "loss": 1.6778, - "step": 16870 - }, - { - "epoch": 0.54016, - "grad_norm": 44.901668548583984, - "learning_rate": 1.6220444444444444e-05, - "loss": 1.6551, - "step": 16880 - }, - { - "epoch": 0.54048, - "grad_norm": 43.41035461425781, - "learning_rate": 1.621688888888889e-05, - "loss": 1.7107, - "step": 16890 - }, - { - "epoch": 0.5408, - "grad_norm": 48.324066162109375, - "learning_rate": 1.6213333333333333e-05, - "loss": 1.6959, - "step": 16900 - }, - { - "epoch": 0.54112, - "grad_norm": 43.75379943847656, - "learning_rate": 1.620977777777778e-05, - "loss": 1.7029, - "step": 16910 - }, - { - "epoch": 0.54144, - "grad_norm": 43.38237380981445, - "learning_rate": 1.6206222222222222e-05, - "loss": 1.6957, - "step": 16920 - }, - { - "epoch": 0.54176, - "grad_norm": 44.03130340576172, - "learning_rate": 1.620266666666667e-05, 
- "loss": 1.6884, - "step": 16930 - }, - { - "epoch": 0.54208, - "grad_norm": 45.490447998046875, - "learning_rate": 1.619911111111111e-05, - "loss": 1.6706, - "step": 16940 - }, - { - "epoch": 0.5424, - "grad_norm": 45.389976501464844, - "learning_rate": 1.619555555555556e-05, - "loss": 1.7352, - "step": 16950 - }, - { - "epoch": 0.54272, - "grad_norm": 44.04669189453125, - "learning_rate": 1.6192e-05, - "loss": 1.6794, - "step": 16960 - }, - { - "epoch": 0.54304, - "grad_norm": 44.17753601074219, - "learning_rate": 1.6188444444444448e-05, - "loss": 1.6918, - "step": 16970 - }, - { - "epoch": 0.54336, - "grad_norm": 43.18086242675781, - "learning_rate": 1.618488888888889e-05, - "loss": 1.6545, - "step": 16980 - }, - { - "epoch": 0.54368, - "grad_norm": 43.751461029052734, - "learning_rate": 1.6181333333333337e-05, - "loss": 1.689, - "step": 16990 - }, - { - "epoch": 0.544, - "grad_norm": 44.13274002075195, - "learning_rate": 1.617777777777778e-05, - "loss": 1.7332, - "step": 17000 - }, - { - "epoch": 0.54432, - "grad_norm": 45.5268440246582, - "learning_rate": 1.6174222222222223e-05, - "loss": 1.701, - "step": 17010 - }, - { - "epoch": 0.54464, - "grad_norm": 45.75001525878906, - "learning_rate": 1.6170666666666667e-05, - "loss": 1.711, - "step": 17020 - }, - { - "epoch": 0.54496, - "grad_norm": 43.565834045410156, - "learning_rate": 1.6167111111111112e-05, - "loss": 1.6872, - "step": 17030 - }, - { - "epoch": 0.54528, - "grad_norm": 45.25817108154297, - "learning_rate": 1.6163555555555557e-05, - "loss": 1.6778, - "step": 17040 - }, - { - "epoch": 0.5456, - "grad_norm": 43.177886962890625, - "learning_rate": 1.616e-05, - "loss": 1.7062, - "step": 17050 - }, - { - "epoch": 0.54592, - "grad_norm": 45.22313690185547, - "learning_rate": 1.6156444444444446e-05, - "loss": 1.6735, - "step": 17060 - }, - { - "epoch": 0.54624, - "grad_norm": 45.08768844604492, - "learning_rate": 1.615288888888889e-05, - "loss": 1.7249, - "step": 17070 - }, - { - "epoch": 0.54656, - "grad_norm": 44.014251708984375, - "learning_rate": 1.6149333333333335e-05, - "loss": 1.6784, - "step": 17080 - }, - { - "epoch": 0.54688, - "grad_norm": 45.65128707885742, - "learning_rate": 1.614577777777778e-05, - "loss": 1.6982, - "step": 17090 - }, - { - "epoch": 0.5472, - "grad_norm": 44.55026626586914, - "learning_rate": 1.6142222222222224e-05, - "loss": 1.7088, - "step": 17100 - }, - { - "epoch": 0.54752, - "grad_norm": 63.36881637573242, - "learning_rate": 1.613866666666667e-05, - "loss": 1.6831, - "step": 17110 - }, - { - "epoch": 0.54784, - "grad_norm": 45.86473083496094, - "learning_rate": 1.6135111111111113e-05, - "loss": 1.7168, - "step": 17120 - }, - { - "epoch": 0.54816, - "grad_norm": 44.3762092590332, - "learning_rate": 1.6131555555555554e-05, - "loss": 1.6983, - "step": 17130 - }, - { - "epoch": 0.54848, - "grad_norm": 45.90697479248047, - "learning_rate": 1.6128000000000002e-05, - "loss": 1.6779, - "step": 17140 - }, - { - "epoch": 0.5488, - "grad_norm": 47.833343505859375, - "learning_rate": 1.6124444444444443e-05, - "loss": 1.6911, - "step": 17150 - }, - { - "epoch": 0.54912, - "grad_norm": 43.13536834716797, - "learning_rate": 1.612088888888889e-05, - "loss": 1.7273, - "step": 17160 - }, - { - "epoch": 0.54944, - "grad_norm": 45.21577453613281, - "learning_rate": 1.6117333333333332e-05, - "loss": 1.695, - "step": 17170 - }, - { - "epoch": 0.54976, - "grad_norm": 48.69087219238281, - "learning_rate": 1.611377777777778e-05, - "loss": 1.7112, - "step": 17180 - }, - { - "epoch": 0.55008, - "grad_norm": 
45.58196258544922, - "learning_rate": 1.611022222222222e-05, - "loss": 1.6941, - "step": 17190 - }, - { - "epoch": 0.5504, - "grad_norm": 45.56036376953125, - "learning_rate": 1.610666666666667e-05, - "loss": 1.6949, - "step": 17200 - }, - { - "epoch": 0.55072, - "grad_norm": 49.056949615478516, - "learning_rate": 1.610311111111111e-05, - "loss": 1.7246, - "step": 17210 - }, - { - "epoch": 0.55104, - "grad_norm": 45.82423782348633, - "learning_rate": 1.609955555555556e-05, - "loss": 1.6986, - "step": 17220 - }, - { - "epoch": 0.55136, - "grad_norm": 44.68406677246094, - "learning_rate": 1.6096e-05, - "loss": 1.7044, - "step": 17230 - }, - { - "epoch": 0.55168, - "grad_norm": 45.451416015625, - "learning_rate": 1.6092444444444448e-05, - "loss": 1.686, - "step": 17240 - }, - { - "epoch": 0.552, - "grad_norm": 47.66313552856445, - "learning_rate": 1.608888888888889e-05, - "loss": 1.7049, - "step": 17250 - }, - { - "epoch": 0.55232, - "grad_norm": 42.10512924194336, - "learning_rate": 1.6085333333333333e-05, - "loss": 1.6732, - "step": 17260 - }, - { - "epoch": 0.55264, - "grad_norm": 44.73134231567383, - "learning_rate": 1.6081777777777778e-05, - "loss": 1.6905, - "step": 17270 - }, - { - "epoch": 0.55296, - "grad_norm": 43.184906005859375, - "learning_rate": 1.6078222222222222e-05, - "loss": 1.6986, - "step": 17280 - }, - { - "epoch": 0.55328, - "grad_norm": 44.09386444091797, - "learning_rate": 1.6074666666666667e-05, - "loss": 1.677, - "step": 17290 - }, - { - "epoch": 0.5536, - "grad_norm": 42.74454116821289, - "learning_rate": 1.607111111111111e-05, - "loss": 1.7163, - "step": 17300 - }, - { - "epoch": 0.55392, - "grad_norm": 43.98388671875, - "learning_rate": 1.6067555555555556e-05, - "loss": 1.713, - "step": 17310 - }, - { - "epoch": 0.55424, - "grad_norm": 45.99345397949219, - "learning_rate": 1.6064e-05, - "loss": 1.6824, - "step": 17320 - }, - { - "epoch": 0.55456, - "grad_norm": 43.22311019897461, - "learning_rate": 1.6060444444444445e-05, - "loss": 1.7054, - "step": 17330 - }, - { - "epoch": 0.55488, - "grad_norm": 45.42384338378906, - "learning_rate": 1.605688888888889e-05, - "loss": 1.689, - "step": 17340 - }, - { - "epoch": 0.5552, - "grad_norm": 43.916080474853516, - "learning_rate": 1.6053333333333334e-05, - "loss": 1.6624, - "step": 17350 - }, - { - "epoch": 0.55552, - "grad_norm": 46.04616165161133, - "learning_rate": 1.604977777777778e-05, - "loss": 1.6876, - "step": 17360 - }, - { - "epoch": 0.55584, - "grad_norm": 44.111244201660156, - "learning_rate": 1.6046222222222223e-05, - "loss": 1.713, - "step": 17370 - }, - { - "epoch": 0.55616, - "grad_norm": 44.648834228515625, - "learning_rate": 1.6042666666666668e-05, - "loss": 1.7224, - "step": 17380 - }, - { - "epoch": 0.55648, - "grad_norm": 44.2633171081543, - "learning_rate": 1.6039111111111113e-05, - "loss": 1.6621, - "step": 17390 - }, - { - "epoch": 0.5568, - "grad_norm": 45.79343795776367, - "learning_rate": 1.6035555555555557e-05, - "loss": 1.6911, - "step": 17400 - }, - { - "epoch": 0.55712, - "grad_norm": 44.808135986328125, - "learning_rate": 1.6032e-05, - "loss": 1.6654, - "step": 17410 - }, - { - "epoch": 0.55744, - "grad_norm": 45.78480529785156, - "learning_rate": 1.6028444444444446e-05, - "loss": 1.7339, - "step": 17420 - }, - { - "epoch": 0.55776, - "grad_norm": 44.06301498413086, - "learning_rate": 1.602488888888889e-05, - "loss": 1.6808, - "step": 17430 - }, - { - "epoch": 0.55808, - "grad_norm": 43.80913162231445, - "learning_rate": 1.6021333333333335e-05, - "loss": 1.709, - "step": 17440 - }, - { - 
"epoch": 0.5584, - "grad_norm": 43.52967071533203, - "learning_rate": 1.601777777777778e-05, - "loss": 1.7063, - "step": 17450 - }, - { - "epoch": 0.55872, - "grad_norm": 45.998050689697266, - "learning_rate": 1.6014222222222224e-05, - "loss": 1.6858, - "step": 17460 - }, - { - "epoch": 0.55904, - "grad_norm": 44.415245056152344, - "learning_rate": 1.601066666666667e-05, - "loss": 1.7001, - "step": 17470 - }, - { - "epoch": 0.55936, - "grad_norm": 43.485721588134766, - "learning_rate": 1.6007111111111114e-05, - "loss": 1.695, - "step": 17480 - }, - { - "epoch": 0.55968, - "grad_norm": 44.18275833129883, - "learning_rate": 1.6003555555555558e-05, - "loss": 1.6661, - "step": 17490 - }, - { - "epoch": 0.56, - "grad_norm": 43.11178207397461, - "learning_rate": 1.6000000000000003e-05, - "loss": 1.7007, - "step": 17500 - }, - { - "epoch": 0.56032, - "grad_norm": 44.23533630371094, - "learning_rate": 1.5996444444444447e-05, - "loss": 1.6794, - "step": 17510 - }, - { - "epoch": 0.56064, - "grad_norm": 47.07341003417969, - "learning_rate": 1.5992888888888892e-05, - "loss": 1.6812, - "step": 17520 - }, - { - "epoch": 0.56096, - "grad_norm": 44.18885040283203, - "learning_rate": 1.5989333333333333e-05, - "loss": 1.6781, - "step": 17530 - }, - { - "epoch": 0.56128, - "grad_norm": 43.99700927734375, - "learning_rate": 1.598577777777778e-05, - "loss": 1.6692, - "step": 17540 - }, - { - "epoch": 0.5616, - "grad_norm": 43.02412796020508, - "learning_rate": 1.5982222222222222e-05, - "loss": 1.6889, - "step": 17550 - }, - { - "epoch": 0.56192, - "grad_norm": 43.83738327026367, - "learning_rate": 1.597866666666667e-05, - "loss": 1.7167, - "step": 17560 - }, - { - "epoch": 0.56224, - "grad_norm": 45.4013671875, - "learning_rate": 1.597511111111111e-05, - "loss": 1.6789, - "step": 17570 - }, - { - "epoch": 0.56256, - "grad_norm": 43.10468673706055, - "learning_rate": 1.597155555555556e-05, - "loss": 1.7157, - "step": 17580 - }, - { - "epoch": 0.56288, - "grad_norm": 47.6757698059082, - "learning_rate": 1.5968e-05, - "loss": 1.7217, - "step": 17590 - }, - { - "epoch": 0.5632, - "grad_norm": 43.0317268371582, - "learning_rate": 1.5964444444444448e-05, - "loss": 1.7004, - "step": 17600 - }, - { - "epoch": 0.56352, - "grad_norm": 46.82234191894531, - "learning_rate": 1.596088888888889e-05, - "loss": 1.6829, - "step": 17610 - }, - { - "epoch": 0.56384, - "grad_norm": 44.28413009643555, - "learning_rate": 1.5957333333333334e-05, - "loss": 1.7082, - "step": 17620 - }, - { - "epoch": 0.56416, - "grad_norm": 45.0130615234375, - "learning_rate": 1.595377777777778e-05, - "loss": 1.7367, - "step": 17630 - }, - { - "epoch": 0.56448, - "grad_norm": 46.60377883911133, - "learning_rate": 1.5950222222222223e-05, - "loss": 1.6681, - "step": 17640 - }, - { - "epoch": 0.5648, - "grad_norm": 45.25273513793945, - "learning_rate": 1.5946666666666668e-05, - "loss": 1.6995, - "step": 17650 - }, - { - "epoch": 0.56512, - "grad_norm": 43.679325103759766, - "learning_rate": 1.5943111111111112e-05, - "loss": 1.6902, - "step": 17660 - }, - { - "epoch": 0.56544, - "grad_norm": 44.908199310302734, - "learning_rate": 1.5939555555555557e-05, - "loss": 1.7095, - "step": 17670 - }, - { - "epoch": 0.56576, - "grad_norm": 45.180484771728516, - "learning_rate": 1.5936e-05, - "loss": 1.7037, - "step": 17680 - }, - { - "epoch": 0.56608, - "grad_norm": 46.96930694580078, - "learning_rate": 1.5932444444444446e-05, - "loss": 1.7447, - "step": 17690 - }, - { - "epoch": 0.5664, - "grad_norm": 45.75835037231445, - "learning_rate": 1.592888888888889e-05, - 
"loss": 1.7116, - "step": 17700 - }, - { - "epoch": 0.56672, - "grad_norm": 43.56595230102539, - "learning_rate": 1.5925333333333335e-05, - "loss": 1.7121, - "step": 17710 - }, - { - "epoch": 0.56704, - "grad_norm": 44.589962005615234, - "learning_rate": 1.592177777777778e-05, - "loss": 1.7161, - "step": 17720 - }, - { - "epoch": 0.56736, - "grad_norm": 42.827396392822266, - "learning_rate": 1.5918222222222224e-05, - "loss": 1.6813, - "step": 17730 - }, - { - "epoch": 0.56768, - "grad_norm": 42.656185150146484, - "learning_rate": 1.591466666666667e-05, - "loss": 1.6775, - "step": 17740 - }, - { - "epoch": 0.568, - "grad_norm": 45.042747497558594, - "learning_rate": 1.5911111111111113e-05, - "loss": 1.6921, - "step": 17750 - }, - { - "epoch": 0.56832, - "grad_norm": 44.19981384277344, - "learning_rate": 1.5907555555555558e-05, - "loss": 1.6934, - "step": 17760 - }, - { - "epoch": 0.56864, - "grad_norm": 44.734893798828125, - "learning_rate": 1.5904000000000002e-05, - "loss": 1.686, - "step": 17770 - }, - { - "epoch": 0.56896, - "grad_norm": 43.76615905761719, - "learning_rate": 1.5900444444444443e-05, - "loss": 1.7312, - "step": 17780 - }, - { - "epoch": 0.56928, - "grad_norm": 44.63469696044922, - "learning_rate": 1.589688888888889e-05, - "loss": 1.7115, - "step": 17790 - }, - { - "epoch": 0.5696, - "grad_norm": 43.65608215332031, - "learning_rate": 1.5893333333333333e-05, - "loss": 1.6742, - "step": 17800 - }, - { - "epoch": 0.56992, - "grad_norm": 45.9371337890625, - "learning_rate": 1.588977777777778e-05, - "loss": 1.7102, - "step": 17810 - }, - { - "epoch": 0.57024, - "grad_norm": 44.41642379760742, - "learning_rate": 1.588622222222222e-05, - "loss": 1.7064, - "step": 17820 - }, - { - "epoch": 0.57056, - "grad_norm": 45.774192810058594, - "learning_rate": 1.588266666666667e-05, - "loss": 1.709, - "step": 17830 - }, - { - "epoch": 0.57088, - "grad_norm": 46.056217193603516, - "learning_rate": 1.587911111111111e-05, - "loss": 1.6782, - "step": 17840 - }, - { - "epoch": 0.5712, - "grad_norm": 50.94669723510742, - "learning_rate": 1.587555555555556e-05, - "loss": 1.6707, - "step": 17850 - }, - { - "epoch": 0.57152, - "grad_norm": 44.1059455871582, - "learning_rate": 1.5872e-05, - "loss": 1.67, - "step": 17860 - }, - { - "epoch": 0.57184, - "grad_norm": 44.278621673583984, - "learning_rate": 1.5868444444444448e-05, - "loss": 1.7092, - "step": 17870 - }, - { - "epoch": 0.57216, - "grad_norm": 46.264774322509766, - "learning_rate": 1.586488888888889e-05, - "loss": 1.6798, - "step": 17880 - }, - { - "epoch": 0.57248, - "grad_norm": 42.81804656982422, - "learning_rate": 1.5861333333333337e-05, - "loss": 1.6711, - "step": 17890 - }, - { - "epoch": 0.5728, - "grad_norm": 42.7204475402832, - "learning_rate": 1.5857777777777778e-05, - "loss": 1.7242, - "step": 17900 - }, - { - "epoch": 0.57312, - "grad_norm": 47.116390228271484, - "learning_rate": 1.5854222222222223e-05, - "loss": 1.6984, - "step": 17910 - }, - { - "epoch": 0.57344, - "grad_norm": 43.8947639465332, - "learning_rate": 1.5850666666666667e-05, - "loss": 1.6769, - "step": 17920 - }, - { - "epoch": 0.57376, - "grad_norm": 44.71806716918945, - "learning_rate": 1.5847111111111112e-05, - "loss": 1.7098, - "step": 17930 - }, - { - "epoch": 0.57408, - "grad_norm": 44.0560302734375, - "learning_rate": 1.5843555555555556e-05, - "loss": 1.7095, - "step": 17940 - }, - { - "epoch": 0.5744, - "grad_norm": 44.2470817565918, - "learning_rate": 1.584e-05, - "loss": 1.7066, - "step": 17950 - }, - { - "epoch": 0.57472, - "grad_norm": 43.45963668823242, 
- "learning_rate": 1.5836444444444445e-05, - "loss": 1.6706, - "step": 17960 - }, - { - "epoch": 0.57504, - "grad_norm": 45.13947296142578, - "learning_rate": 1.583288888888889e-05, - "loss": 1.6806, - "step": 17970 - }, - { - "epoch": 0.57536, - "grad_norm": 45.662574768066406, - "learning_rate": 1.5829333333333334e-05, - "loss": 1.7123, - "step": 17980 - }, - { - "epoch": 0.57568, - "grad_norm": 44.67338562011719, - "learning_rate": 1.582577777777778e-05, - "loss": 1.6867, - "step": 17990 - }, - { - "epoch": 0.576, - "grad_norm": 44.79176712036133, - "learning_rate": 1.5822222222222224e-05, - "loss": 1.7151, - "step": 18000 - }, - { - "epoch": 0.57632, - "grad_norm": 46.5061149597168, - "learning_rate": 1.5818666666666668e-05, - "loss": 1.7163, - "step": 18010 - }, - { - "epoch": 0.57664, - "grad_norm": 45.3877067565918, - "learning_rate": 1.5815111111111113e-05, - "loss": 1.7154, - "step": 18020 - }, - { - "epoch": 0.57696, - "grad_norm": 45.97566223144531, - "learning_rate": 1.5811555555555557e-05, - "loss": 1.6957, - "step": 18030 - }, - { - "epoch": 0.57728, - "grad_norm": 44.81576919555664, - "learning_rate": 1.5808000000000002e-05, - "loss": 1.6721, - "step": 18040 - }, - { - "epoch": 0.5776, - "grad_norm": 43.295509338378906, - "learning_rate": 1.5804444444444446e-05, - "loss": 1.6782, - "step": 18050 - }, - { - "epoch": 0.57792, - "grad_norm": 44.87993621826172, - "learning_rate": 1.580088888888889e-05, - "loss": 1.6799, - "step": 18060 - }, - { - "epoch": 0.57824, - "grad_norm": 43.835365295410156, - "learning_rate": 1.5797333333333335e-05, - "loss": 1.6654, - "step": 18070 - }, - { - "epoch": 0.57856, - "grad_norm": 46.73650360107422, - "learning_rate": 1.579377777777778e-05, - "loss": 1.6948, - "step": 18080 - }, - { - "epoch": 0.57888, - "grad_norm": 43.62708282470703, - "learning_rate": 1.5790222222222225e-05, - "loss": 1.6844, - "step": 18090 - }, - { - "epoch": 0.5792, - "grad_norm": 42.22301483154297, - "learning_rate": 1.578666666666667e-05, - "loss": 1.6359, - "step": 18100 - }, - { - "epoch": 0.57952, - "grad_norm": 43.23906326293945, - "learning_rate": 1.5783111111111114e-05, - "loss": 1.701, - "step": 18110 - }, - { - "epoch": 0.57984, - "grad_norm": 44.715187072753906, - "learning_rate": 1.5779555555555558e-05, - "loss": 1.6639, - "step": 18120 - }, - { - "epoch": 0.58016, - "grad_norm": 42.52825164794922, - "learning_rate": 1.5776e-05, - "loss": 1.6719, - "step": 18130 - }, - { - "epoch": 0.58048, - "grad_norm": 44.43534469604492, - "learning_rate": 1.5772444444444447e-05, - "loss": 1.7142, - "step": 18140 - }, - { - "epoch": 0.5808, - "grad_norm": 41.77783966064453, - "learning_rate": 1.576888888888889e-05, - "loss": 1.7062, - "step": 18150 - }, - { - "epoch": 0.58112, - "grad_norm": 44.75507354736328, - "learning_rate": 1.5765333333333333e-05, - "loss": 1.6819, - "step": 18160 - }, - { - "epoch": 0.58144, - "grad_norm": 47.013145446777344, - "learning_rate": 1.5761777777777778e-05, - "loss": 1.7089, - "step": 18170 - }, - { - "epoch": 0.58176, - "grad_norm": 45.202571868896484, - "learning_rate": 1.5758222222222222e-05, - "loss": 1.6876, - "step": 18180 - }, - { - "epoch": 0.58208, - "grad_norm": 43.15135192871094, - "learning_rate": 1.5754666666666667e-05, - "loss": 1.6999, - "step": 18190 - }, - { - "epoch": 0.5824, - "grad_norm": 44.06850051879883, - "learning_rate": 1.575111111111111e-05, - "loss": 1.7127, - "step": 18200 - }, - { - "epoch": 0.58272, - "grad_norm": 44.04061508178711, - "learning_rate": 1.5747555555555556e-05, - "loss": 1.6819, - "step": 18210 
- }, - { - "epoch": 0.58304, - "grad_norm": 46.223114013671875, - "learning_rate": 1.5744e-05, - "loss": 1.6726, - "step": 18220 - }, - { - "epoch": 0.58336, - "grad_norm": 44.79490280151367, - "learning_rate": 1.5740444444444445e-05, - "loss": 1.6703, - "step": 18230 - }, - { - "epoch": 0.58368, - "grad_norm": 44.74020767211914, - "learning_rate": 1.573688888888889e-05, - "loss": 1.676, - "step": 18240 - }, - { - "epoch": 0.584, - "grad_norm": 45.662872314453125, - "learning_rate": 1.5733333333333334e-05, - "loss": 1.6959, - "step": 18250 - }, - { - "epoch": 0.58432, - "grad_norm": 43.779197692871094, - "learning_rate": 1.572977777777778e-05, - "loss": 1.7072, - "step": 18260 - }, - { - "epoch": 0.58464, - "grad_norm": 46.27597427368164, - "learning_rate": 1.5726222222222223e-05, - "loss": 1.7126, - "step": 18270 - }, - { - "epoch": 0.58496, - "grad_norm": 44.4024658203125, - "learning_rate": 1.5722666666666668e-05, - "loss": 1.6876, - "step": 18280 - }, - { - "epoch": 0.58528, - "grad_norm": 44.50633239746094, - "learning_rate": 1.5719111111111112e-05, - "loss": 1.7045, - "step": 18290 - }, - { - "epoch": 0.5856, - "grad_norm": 45.14801025390625, - "learning_rate": 1.5715555555555557e-05, - "loss": 1.6796, - "step": 18300 - }, - { - "epoch": 0.58592, - "grad_norm": 43.62449264526367, - "learning_rate": 1.5712e-05, - "loss": 1.6682, - "step": 18310 - }, - { - "epoch": 0.58624, - "grad_norm": 43.99118423461914, - "learning_rate": 1.5708444444444446e-05, - "loss": 1.6998, - "step": 18320 - }, - { - "epoch": 0.58656, - "grad_norm": 44.510032653808594, - "learning_rate": 1.570488888888889e-05, - "loss": 1.7034, - "step": 18330 - }, - { - "epoch": 0.58688, - "grad_norm": 44.636871337890625, - "learning_rate": 1.5701333333333335e-05, - "loss": 1.704, - "step": 18340 - }, - { - "epoch": 0.5872, - "grad_norm": 43.79024887084961, - "learning_rate": 1.569777777777778e-05, - "loss": 1.6864, - "step": 18350 - }, - { - "epoch": 0.58752, - "grad_norm": 45.9245719909668, - "learning_rate": 1.5694222222222224e-05, - "loss": 1.6936, - "step": 18360 - }, - { - "epoch": 0.58784, - "grad_norm": 44.46293640136719, - "learning_rate": 1.569066666666667e-05, - "loss": 1.7028, - "step": 18370 - }, - { - "epoch": 0.58816, - "grad_norm": 43.017608642578125, - "learning_rate": 1.5687111111111113e-05, - "loss": 1.6934, - "step": 18380 - }, - { - "epoch": 0.58848, - "grad_norm": 44.94241714477539, - "learning_rate": 1.5683555555555558e-05, - "loss": 1.6954, - "step": 18390 - }, - { - "epoch": 0.5888, - "grad_norm": 44.682857513427734, - "learning_rate": 1.5680000000000002e-05, - "loss": 1.6912, - "step": 18400 - }, - { - "epoch": 0.58912, - "grad_norm": 46.192054748535156, - "learning_rate": 1.5676444444444447e-05, - "loss": 1.7088, - "step": 18410 - }, - { - "epoch": 0.58944, - "grad_norm": 44.73866271972656, - "learning_rate": 1.567288888888889e-05, - "loss": 1.6758, - "step": 18420 - }, - { - "epoch": 0.58976, - "grad_norm": 44.03883743286133, - "learning_rate": 1.5669333333333333e-05, - "loss": 1.6583, - "step": 18430 - }, - { - "epoch": 0.59008, - "grad_norm": 44.191524505615234, - "learning_rate": 1.566577777777778e-05, - "loss": 1.6804, - "step": 18440 - }, - { - "epoch": 0.5904, - "grad_norm": 45.07223892211914, - "learning_rate": 1.5662222222222222e-05, - "loss": 1.6737, - "step": 18450 - }, - { - "epoch": 0.59072, - "grad_norm": 45.84001541137695, - "learning_rate": 1.565866666666667e-05, - "loss": 1.6878, - "step": 18460 - }, - { - "epoch": 0.59104, - "grad_norm": 43.74607467651367, - "learning_rate": 
1.565511111111111e-05, - "loss": 1.6927, - "step": 18470 - }, - { - "epoch": 0.59136, - "grad_norm": 43.94548034667969, - "learning_rate": 1.565155555555556e-05, - "loss": 1.671, - "step": 18480 - }, - { - "epoch": 0.59168, - "grad_norm": 45.99900817871094, - "learning_rate": 1.5648e-05, - "loss": 1.6671, - "step": 18490 - }, - { - "epoch": 0.592, - "grad_norm": 45.90909194946289, - "learning_rate": 1.5644444444444448e-05, - "loss": 1.7005, - "step": 18500 - }, - { - "epoch": 0.59232, - "grad_norm": 45.09535217285156, - "learning_rate": 1.564088888888889e-05, - "loss": 1.6521, - "step": 18510 - }, - { - "epoch": 0.59264, - "grad_norm": 45.49460220336914, - "learning_rate": 1.5637333333333337e-05, - "loss": 1.6695, - "step": 18520 - }, - { - "epoch": 0.59296, - "grad_norm": 46.332801818847656, - "learning_rate": 1.5633777777777778e-05, - "loss": 1.7065, - "step": 18530 - }, - { - "epoch": 0.59328, - "grad_norm": 46.69112777709961, - "learning_rate": 1.5630222222222226e-05, - "loss": 1.7032, - "step": 18540 - }, - { - "epoch": 0.5936, - "grad_norm": 46.828651428222656, - "learning_rate": 1.5626666666666667e-05, - "loss": 1.7173, - "step": 18550 - }, - { - "epoch": 0.59392, - "grad_norm": 44.735477447509766, - "learning_rate": 1.5623111111111112e-05, - "loss": 1.6966, - "step": 18560 - }, - { - "epoch": 0.59424, - "grad_norm": 43.93488693237305, - "learning_rate": 1.5619555555555556e-05, - "loss": 1.6617, - "step": 18570 - }, - { - "epoch": 0.59456, - "grad_norm": 44.45972442626953, - "learning_rate": 1.5616e-05, - "loss": 1.6642, - "step": 18580 - }, - { - "epoch": 0.59488, - "grad_norm": 47.216346740722656, - "learning_rate": 1.5612444444444445e-05, - "loss": 1.7012, - "step": 18590 - }, - { - "epoch": 0.5952, - "grad_norm": 41.691734313964844, - "learning_rate": 1.560888888888889e-05, - "loss": 1.6542, - "step": 18600 - }, - { - "epoch": 0.59552, - "grad_norm": 46.06924057006836, - "learning_rate": 1.5605333333333335e-05, - "loss": 1.7038, - "step": 18610 - }, - { - "epoch": 0.59584, - "grad_norm": 42.43273162841797, - "learning_rate": 1.560177777777778e-05, - "loss": 1.6794, - "step": 18620 - }, - { - "epoch": 0.59616, - "grad_norm": 43.787010192871094, - "learning_rate": 1.5598222222222224e-05, - "loss": 1.703, - "step": 18630 - }, - { - "epoch": 0.59648, - "grad_norm": 43.158817291259766, - "learning_rate": 1.5594666666666668e-05, - "loss": 1.6894, - "step": 18640 - }, - { - "epoch": 0.5968, - "grad_norm": 44.50301742553711, - "learning_rate": 1.5591111111111113e-05, - "loss": 1.6775, - "step": 18650 - }, - { - "epoch": 0.59712, - "grad_norm": 43.681915283203125, - "learning_rate": 1.5587555555555557e-05, - "loss": 1.6754, - "step": 18660 - }, - { - "epoch": 0.59744, - "grad_norm": 43.70466995239258, - "learning_rate": 1.5584000000000002e-05, - "loss": 1.6803, - "step": 18670 - }, - { - "epoch": 0.59776, - "grad_norm": 45.950775146484375, - "learning_rate": 1.5580444444444443e-05, - "loss": 1.7145, - "step": 18680 - }, - { - "epoch": 0.59808, - "grad_norm": 44.928184509277344, - "learning_rate": 1.557688888888889e-05, - "loss": 1.6855, - "step": 18690 - }, - { - "epoch": 0.5984, - "grad_norm": 47.1044807434082, - "learning_rate": 1.5573333333333332e-05, - "loss": 1.6987, - "step": 18700 - }, - { - "epoch": 0.59872, - "grad_norm": 46.54291915893555, - "learning_rate": 1.556977777777778e-05, - "loss": 1.6644, - "step": 18710 - }, - { - "epoch": 0.59904, - "grad_norm": 44.95602035522461, - "learning_rate": 1.556622222222222e-05, - "loss": 1.6422, - "step": 18720 - }, - { - "epoch": 
0.59936, - "grad_norm": 48.815269470214844, - "learning_rate": 1.556266666666667e-05, - "loss": 1.6894, - "step": 18730 - }, - { - "epoch": 0.59968, - "grad_norm": 44.7770881652832, - "learning_rate": 1.555911111111111e-05, - "loss": 1.707, - "step": 18740 - }, - { - "epoch": 0.6, - "grad_norm": 44.44867706298828, - "learning_rate": 1.555555555555556e-05, - "loss": 1.6788, - "step": 18750 - }, - { - "epoch": 0.60032, - "grad_norm": 44.379432678222656, - "learning_rate": 1.5552e-05, - "loss": 1.6692, - "step": 18760 - }, - { - "epoch": 0.60064, - "grad_norm": 44.882110595703125, - "learning_rate": 1.5548444444444447e-05, - "loss": 1.7007, - "step": 18770 - }, - { - "epoch": 0.60096, - "grad_norm": 43.32308578491211, - "learning_rate": 1.554488888888889e-05, - "loss": 1.6706, - "step": 18780 - }, - { - "epoch": 0.60128, - "grad_norm": 45.213165283203125, - "learning_rate": 1.5541333333333337e-05, - "loss": 1.6923, - "step": 18790 - }, - { - "epoch": 0.6016, - "grad_norm": 45.2623176574707, - "learning_rate": 1.5537777777777778e-05, - "loss": 1.6939, - "step": 18800 - }, - { - "epoch": 0.60192, - "grad_norm": 43.48740005493164, - "learning_rate": 1.5534222222222222e-05, - "loss": 1.6966, - "step": 18810 - }, - { - "epoch": 0.60224, - "grad_norm": 46.13747787475586, - "learning_rate": 1.5530666666666667e-05, - "loss": 1.7097, - "step": 18820 - }, - { - "epoch": 0.60256, - "grad_norm": 47.08616638183594, - "learning_rate": 1.552711111111111e-05, - "loss": 1.6467, - "step": 18830 - }, - { - "epoch": 0.60288, - "grad_norm": 43.652793884277344, - "learning_rate": 1.5523555555555556e-05, - "loss": 1.673, - "step": 18840 - }, - { - "epoch": 0.6032, - "grad_norm": 45.95335006713867, - "learning_rate": 1.552e-05, - "loss": 1.6922, - "step": 18850 - }, - { - "epoch": 0.60352, - "grad_norm": 43.87394332885742, - "learning_rate": 1.5516444444444445e-05, - "loss": 1.6931, - "step": 18860 - }, - { - "epoch": 0.60384, - "grad_norm": 44.94473648071289, - "learning_rate": 1.551288888888889e-05, - "loss": 1.6792, - "step": 18870 - }, - { - "epoch": 0.60416, - "grad_norm": 42.34600830078125, - "learning_rate": 1.5509333333333334e-05, - "loss": 1.6824, - "step": 18880 - }, - { - "epoch": 0.60448, - "grad_norm": 47.03275680541992, - "learning_rate": 1.550577777777778e-05, - "loss": 1.6911, - "step": 18890 - }, - { - "epoch": 0.6048, - "grad_norm": 45.630332946777344, - "learning_rate": 1.5502222222222223e-05, - "loss": 1.6909, - "step": 18900 - }, - { - "epoch": 0.60512, - "grad_norm": 42.56125259399414, - "learning_rate": 1.5498666666666668e-05, - "loss": 1.6462, - "step": 18910 - }, - { - "epoch": 0.60544, - "grad_norm": 43.68327331542969, - "learning_rate": 1.5495111111111112e-05, - "loss": 1.691, - "step": 18920 - }, - { - "epoch": 0.60576, - "grad_norm": 46.32207107543945, - "learning_rate": 1.5491555555555557e-05, - "loss": 1.7196, - "step": 18930 - }, - { - "epoch": 0.60608, - "grad_norm": 46.93421936035156, - "learning_rate": 1.5488e-05, - "loss": 1.684, - "step": 18940 - }, - { - "epoch": 0.6064, - "grad_norm": 42.83504867553711, - "learning_rate": 1.5484444444444446e-05, - "loss": 1.659, - "step": 18950 - }, - { - "epoch": 0.60672, - "grad_norm": 44.189720153808594, - "learning_rate": 1.548088888888889e-05, - "loss": 1.6882, - "step": 18960 - }, - { - "epoch": 0.60704, - "grad_norm": 43.321807861328125, - "learning_rate": 1.5477333333333335e-05, - "loss": 1.7102, - "step": 18970 - }, - { - "epoch": 0.60736, - "grad_norm": 43.58332061767578, - "learning_rate": 1.547377777777778e-05, - "loss": 1.6748, - 
"step": 18980 - }, - { - "epoch": 0.60768, - "grad_norm": 43.99165344238281, - "learning_rate": 1.5470222222222224e-05, - "loss": 1.6678, - "step": 18990 - }, - { - "epoch": 0.608, - "grad_norm": 43.6734619140625, - "learning_rate": 1.546666666666667e-05, - "loss": 1.6676, - "step": 19000 - }, - { - "epoch": 0.60832, - "grad_norm": 44.50790023803711, - "learning_rate": 1.5463111111111113e-05, - "loss": 1.6732, - "step": 19010 - }, - { - "epoch": 0.60864, - "grad_norm": 45.299781799316406, - "learning_rate": 1.5459555555555558e-05, - "loss": 1.7008, - "step": 19020 - }, - { - "epoch": 0.60896, - "grad_norm": 45.65017318725586, - "learning_rate": 1.5456000000000002e-05, - "loss": 1.7073, - "step": 19030 - }, - { - "epoch": 0.60928, - "grad_norm": 45.88054656982422, - "learning_rate": 1.5452444444444447e-05, - "loss": 1.6941, - "step": 19040 - }, - { - "epoch": 0.6096, - "grad_norm": 44.15474319458008, - "learning_rate": 1.544888888888889e-05, - "loss": 1.6764, - "step": 19050 - }, - { - "epoch": 0.60992, - "grad_norm": 43.338172912597656, - "learning_rate": 1.5445333333333333e-05, - "loss": 1.6635, - "step": 19060 - }, - { - "epoch": 0.61024, - "grad_norm": 45.02217483520508, - "learning_rate": 1.544177777777778e-05, - "loss": 1.7024, - "step": 19070 - }, - { - "epoch": 0.61056, - "grad_norm": 44.492401123046875, - "learning_rate": 1.5438222222222222e-05, - "loss": 1.7113, - "step": 19080 - }, - { - "epoch": 0.61088, - "grad_norm": 46.0334587097168, - "learning_rate": 1.543466666666667e-05, - "loss": 1.6678, - "step": 19090 - }, - { - "epoch": 0.6112, - "grad_norm": 44.86727523803711, - "learning_rate": 1.543111111111111e-05, - "loss": 1.6919, - "step": 19100 - }, - { - "epoch": 0.61152, - "grad_norm": 41.22093963623047, - "learning_rate": 1.542755555555556e-05, - "loss": 1.6657, - "step": 19110 - }, - { - "epoch": 0.61184, - "grad_norm": 43.157474517822266, - "learning_rate": 1.5424e-05, - "loss": 1.6803, - "step": 19120 - }, - { - "epoch": 0.61216, - "grad_norm": 44.455142974853516, - "learning_rate": 1.5420444444444448e-05, - "loss": 1.6962, - "step": 19130 - }, - { - "epoch": 0.61248, - "grad_norm": 44.34128189086914, - "learning_rate": 1.541688888888889e-05, - "loss": 1.6648, - "step": 19140 - }, - { - "epoch": 0.6128, - "grad_norm": 46.374366760253906, - "learning_rate": 1.5413333333333337e-05, - "loss": 1.6898, - "step": 19150 - }, - { - "epoch": 0.61312, - "grad_norm": 45.73741149902344, - "learning_rate": 1.5409777777777778e-05, - "loss": 1.6631, - "step": 19160 - }, - { - "epoch": 0.61344, - "grad_norm": 43.768558502197266, - "learning_rate": 1.5406222222222223e-05, - "loss": 1.6703, - "step": 19170 - }, - { - "epoch": 0.61376, - "grad_norm": 44.202598571777344, - "learning_rate": 1.5402666666666667e-05, - "loss": 1.6574, - "step": 19180 - }, - { - "epoch": 0.61408, - "grad_norm": 43.005733489990234, - "learning_rate": 1.5399111111111112e-05, - "loss": 1.6656, - "step": 19190 - }, - { - "epoch": 0.6144, - "grad_norm": 44.95966339111328, - "learning_rate": 1.5395555555555556e-05, - "loss": 1.6811, - "step": 19200 - }, - { - "epoch": 0.61472, - "grad_norm": 46.06792449951172, - "learning_rate": 1.5392e-05, - "loss": 1.6811, - "step": 19210 - }, - { - "epoch": 0.61504, - "grad_norm": 43.61574172973633, - "learning_rate": 1.5388444444444446e-05, - "loss": 1.6939, - "step": 19220 - }, - { - "epoch": 0.61536, - "grad_norm": 45.055580139160156, - "learning_rate": 1.538488888888889e-05, - "loss": 1.7022, - "step": 19230 - }, - { - "epoch": 0.61568, - "grad_norm": 43.75603485107422, - 
"learning_rate": 1.5381333333333335e-05, - "loss": 1.6823, - "step": 19240 - }, - { - "epoch": 0.616, - "grad_norm": 44.875606536865234, - "learning_rate": 1.537777777777778e-05, - "loss": 1.6648, - "step": 19250 - }, - { - "epoch": 0.61632, - "grad_norm": 45.77470016479492, - "learning_rate": 1.5374222222222224e-05, - "loss": 1.6661, - "step": 19260 - }, - { - "epoch": 0.61664, - "grad_norm": 43.80479431152344, - "learning_rate": 1.537066666666667e-05, - "loss": 1.6986, - "step": 19270 - }, - { - "epoch": 0.61696, - "grad_norm": 44.32920837402344, - "learning_rate": 1.5367111111111113e-05, - "loss": 1.7064, - "step": 19280 - }, - { - "epoch": 0.61728, - "grad_norm": 41.32722091674805, - "learning_rate": 1.5363555555555557e-05, - "loss": 1.69, - "step": 19290 - }, - { - "epoch": 0.6176, - "grad_norm": 43.27383041381836, - "learning_rate": 1.5360000000000002e-05, - "loss": 1.6953, - "step": 19300 - }, - { - "epoch": 0.61792, - "grad_norm": 44.69233322143555, - "learning_rate": 1.5356444444444447e-05, - "loss": 1.6642, - "step": 19310 - }, - { - "epoch": 0.61824, - "grad_norm": 44.23918914794922, - "learning_rate": 1.535288888888889e-05, - "loss": 1.6615, - "step": 19320 - }, - { - "epoch": 0.61856, - "grad_norm": 43.73684310913086, - "learning_rate": 1.5349333333333332e-05, - "loss": 1.6781, - "step": 19330 - }, - { - "epoch": 0.61888, - "grad_norm": 42.925804138183594, - "learning_rate": 1.534577777777778e-05, - "loss": 1.6809, - "step": 19340 - }, - { - "epoch": 0.6192, - "grad_norm": 43.84880065917969, - "learning_rate": 1.534222222222222e-05, - "loss": 1.708, - "step": 19350 - }, - { - "epoch": 0.61952, - "grad_norm": 45.829620361328125, - "learning_rate": 1.533866666666667e-05, - "loss": 1.6932, - "step": 19360 - }, - { - "epoch": 0.61984, - "grad_norm": 43.478153228759766, - "learning_rate": 1.533511111111111e-05, - "loss": 1.6811, - "step": 19370 - }, - { - "epoch": 0.62016, - "grad_norm": 45.014156341552734, - "learning_rate": 1.533155555555556e-05, - "loss": 1.7159, - "step": 19380 - }, - { - "epoch": 0.62048, - "grad_norm": 43.75464630126953, - "learning_rate": 1.5328e-05, - "loss": 1.6799, - "step": 19390 - }, - { - "epoch": 0.6208, - "grad_norm": 42.851783752441406, - "learning_rate": 1.5324444444444448e-05, - "loss": 1.6647, - "step": 19400 - }, - { - "epoch": 0.62112, - "grad_norm": 42.745121002197266, - "learning_rate": 1.532088888888889e-05, - "loss": 1.6471, - "step": 19410 - }, - { - "epoch": 0.62144, - "grad_norm": 44.881351470947266, - "learning_rate": 1.5317333333333337e-05, - "loss": 1.7028, - "step": 19420 - }, - { - "epoch": 0.62176, - "grad_norm": 45.286781311035156, - "learning_rate": 1.5313777777777778e-05, - "loss": 1.7154, - "step": 19430 - }, - { - "epoch": 0.62208, - "grad_norm": 44.99727249145508, - "learning_rate": 1.5310222222222226e-05, - "loss": 1.6808, - "step": 19440 - }, - { - "epoch": 0.6224, - "grad_norm": 44.824974060058594, - "learning_rate": 1.5306666666666667e-05, - "loss": 1.6674, - "step": 19450 - }, - { - "epoch": 0.62272, - "grad_norm": 44.57046127319336, - "learning_rate": 1.530311111111111e-05, - "loss": 1.6796, - "step": 19460 - }, - { - "epoch": 0.62304, - "grad_norm": 43.56769561767578, - "learning_rate": 1.5299555555555556e-05, - "loss": 1.7004, - "step": 19470 - }, - { - "epoch": 0.62336, - "grad_norm": 44.90610885620117, - "learning_rate": 1.5296e-05, - "loss": 1.6807, - "step": 19480 - }, - { - "epoch": 0.62368, - "grad_norm": 43.514404296875, - "learning_rate": 1.5292444444444445e-05, - "loss": 1.6982, - "step": 19490 - }, - { - 
"epoch": 0.624, - "grad_norm": 46.587955474853516, - "learning_rate": 1.528888888888889e-05, - "loss": 1.7377, - "step": 19500 - }, - { - "epoch": 0.62432, - "grad_norm": 44.5979118347168, - "learning_rate": 1.5285333333333334e-05, - "loss": 1.6597, - "step": 19510 - }, - { - "epoch": 0.62464, - "grad_norm": 44.230716705322266, - "learning_rate": 1.528177777777778e-05, - "loss": 1.6646, - "step": 19520 - }, - { - "epoch": 0.62496, - "grad_norm": 44.42688751220703, - "learning_rate": 1.5278222222222223e-05, - "loss": 1.6968, - "step": 19530 - }, - { - "epoch": 0.62528, - "grad_norm": 41.63092803955078, - "learning_rate": 1.5274666666666668e-05, - "loss": 1.6577, - "step": 19540 - }, - { - "epoch": 0.6256, - "grad_norm": 44.5085563659668, - "learning_rate": 1.5271111111111112e-05, - "loss": 1.6844, - "step": 19550 - }, - { - "epoch": 0.62592, - "grad_norm": 44.731101989746094, - "learning_rate": 1.5267555555555557e-05, - "loss": 1.649, - "step": 19560 - }, - { - "epoch": 0.62624, - "grad_norm": 44.423377990722656, - "learning_rate": 1.5264e-05, - "loss": 1.6601, - "step": 19570 - }, - { - "epoch": 0.62656, - "grad_norm": 43.00068283081055, - "learning_rate": 1.5260444444444446e-05, - "loss": 1.6932, - "step": 19580 - }, - { - "epoch": 0.62688, - "grad_norm": 43.62189865112305, - "learning_rate": 1.525688888888889e-05, - "loss": 1.693, - "step": 19590 - }, - { - "epoch": 0.6272, - "grad_norm": 45.53034973144531, - "learning_rate": 1.5253333333333335e-05, - "loss": 1.6802, - "step": 19600 - }, - { - "epoch": 0.62752, - "grad_norm": 43.723388671875, - "learning_rate": 1.5249777777777778e-05, - "loss": 1.6553, - "step": 19610 - }, - { - "epoch": 0.62784, - "grad_norm": 42.24656295776367, - "learning_rate": 1.5246222222222224e-05, - "loss": 1.674, - "step": 19620 - }, - { - "epoch": 0.62816, - "grad_norm": 44.599510192871094, - "learning_rate": 1.5242666666666667e-05, - "loss": 1.6999, - "step": 19630 - }, - { - "epoch": 0.62848, - "grad_norm": 46.729061126708984, - "learning_rate": 1.5239111111111113e-05, - "loss": 1.6938, - "step": 19640 - }, - { - "epoch": 0.6288, - "grad_norm": 44.45905685424805, - "learning_rate": 1.5235555555555556e-05, - "loss": 1.6951, - "step": 19650 - }, - { - "epoch": 0.62912, - "grad_norm": 45.92387390136719, - "learning_rate": 1.5232000000000003e-05, - "loss": 1.6967, - "step": 19660 - }, - { - "epoch": 0.62944, - "grad_norm": 44.020652770996094, - "learning_rate": 1.5228444444444445e-05, - "loss": 1.683, - "step": 19670 - }, - { - "epoch": 0.62976, - "grad_norm": 44.318145751953125, - "learning_rate": 1.5224888888888888e-05, - "loss": 1.6638, - "step": 19680 - }, - { - "epoch": 0.63008, - "grad_norm": 42.7271842956543, - "learning_rate": 1.5221333333333335e-05, - "loss": 1.6636, - "step": 19690 - }, - { - "epoch": 0.6304, - "grad_norm": 42.00227355957031, - "learning_rate": 1.5217777777777777e-05, - "loss": 1.6705, - "step": 19700 - }, - { - "epoch": 0.63072, - "grad_norm": 41.81639862060547, - "learning_rate": 1.5214222222222224e-05, - "loss": 1.6666, - "step": 19710 - }, - { - "epoch": 0.63104, - "grad_norm": 45.459503173828125, - "learning_rate": 1.5210666666666666e-05, - "loss": 1.6632, - "step": 19720 - }, - { - "epoch": 0.63136, - "grad_norm": 43.953548431396484, - "learning_rate": 1.5207111111111113e-05, - "loss": 1.6466, - "step": 19730 - }, - { - "epoch": 0.63168, - "grad_norm": 43.911537170410156, - "learning_rate": 1.5203555555555556e-05, - "loss": 1.67, - "step": 19740 - }, - { - "epoch": 0.632, - "grad_norm": 45.1783332824707, - "learning_rate": 
1.5200000000000002e-05, - "loss": 1.6857, - "step": 19750 - }, - { - "epoch": 0.63232, - "grad_norm": 43.7866096496582, - "learning_rate": 1.5196444444444445e-05, - "loss": 1.6848, - "step": 19760 - }, - { - "epoch": 0.63264, - "grad_norm": 43.66336441040039, - "learning_rate": 1.5192888888888891e-05, - "loss": 1.6834, - "step": 19770 - }, - { - "epoch": 0.63296, - "grad_norm": 44.24501037597656, - "learning_rate": 1.5189333333333334e-05, - "loss": 1.6539, - "step": 19780 - }, - { - "epoch": 0.63328, - "grad_norm": 44.39887619018555, - "learning_rate": 1.518577777777778e-05, - "loss": 1.6713, - "step": 19790 - }, - { - "epoch": 0.6336, - "grad_norm": 45.32782745361328, - "learning_rate": 1.5182222222222223e-05, - "loss": 1.6631, - "step": 19800 - }, - { - "epoch": 0.63392, - "grad_norm": 45.3266716003418, - "learning_rate": 1.5178666666666667e-05, - "loss": 1.6648, - "step": 19810 - }, - { - "epoch": 0.63424, - "grad_norm": 43.484432220458984, - "learning_rate": 1.5175111111111112e-05, - "loss": 1.6613, - "step": 19820 - }, - { - "epoch": 0.63456, - "grad_norm": 43.348140716552734, - "learning_rate": 1.5171555555555557e-05, - "loss": 1.671, - "step": 19830 - }, - { - "epoch": 0.63488, - "grad_norm": 45.595542907714844, - "learning_rate": 1.5168000000000001e-05, - "loss": 1.662, - "step": 19840 - }, - { - "epoch": 0.6352, - "grad_norm": 45.99067306518555, - "learning_rate": 1.5164444444444446e-05, - "loss": 1.6731, - "step": 19850 - }, - { - "epoch": 0.63552, - "grad_norm": 43.89120864868164, - "learning_rate": 1.516088888888889e-05, - "loss": 1.6906, - "step": 19860 - }, - { - "epoch": 0.63584, - "grad_norm": 44.42757797241211, - "learning_rate": 1.5157333333333335e-05, - "loss": 1.6891, - "step": 19870 - }, - { - "epoch": 0.63616, - "grad_norm": 42.13017654418945, - "learning_rate": 1.5153777777777778e-05, - "loss": 1.6918, - "step": 19880 - }, - { - "epoch": 0.63648, - "grad_norm": 43.843170166015625, - "learning_rate": 1.5150222222222224e-05, - "loss": 1.6718, - "step": 19890 - }, - { - "epoch": 0.6368, - "grad_norm": 44.21430587768555, - "learning_rate": 1.5146666666666667e-05, - "loss": 1.6957, - "step": 19900 - }, - { - "epoch": 0.63712, - "grad_norm": 41.84242630004883, - "learning_rate": 1.5143111111111113e-05, - "loss": 1.6794, - "step": 19910 - }, - { - "epoch": 0.63744, - "grad_norm": 43.27943801879883, - "learning_rate": 1.5139555555555556e-05, - "loss": 1.7045, - "step": 19920 - }, - { - "epoch": 0.63776, - "grad_norm": 45.409423828125, - "learning_rate": 1.5136000000000002e-05, - "loss": 1.7081, - "step": 19930 - }, - { - "epoch": 0.63808, - "grad_norm": 42.21131896972656, - "learning_rate": 1.5132444444444445e-05, - "loss": 1.7003, - "step": 19940 - }, - { - "epoch": 0.6384, - "grad_norm": 44.100101470947266, - "learning_rate": 1.5128888888888891e-05, - "loss": 1.6741, - "step": 19950 - }, - { - "epoch": 0.63872, - "grad_norm": 46.517189025878906, - "learning_rate": 1.5125333333333334e-05, - "loss": 1.6851, - "step": 19960 - }, - { - "epoch": 0.63904, - "grad_norm": 47.045021057128906, - "learning_rate": 1.512177777777778e-05, - "loss": 1.6922, - "step": 19970 - }, - { - "epoch": 0.63936, - "grad_norm": 44.45469665527344, - "learning_rate": 1.5118222222222223e-05, - "loss": 1.6872, - "step": 19980 - }, - { - "epoch": 0.63968, - "grad_norm": 43.917457580566406, - "learning_rate": 1.5114666666666668e-05, - "loss": 1.6584, - "step": 19990 - }, - { - "epoch": 0.64, - "grad_norm": 45.960819244384766, - "learning_rate": 1.5111111111111112e-05, - "loss": 1.7114, - "step": 20000 - 
}, - { - "epoch": 0.64032, - "grad_norm": 45.35646438598633, - "learning_rate": 1.5107555555555557e-05, - "loss": 1.6932, - "step": 20010 - }, - { - "epoch": 0.64064, - "grad_norm": 42.95730972290039, - "learning_rate": 1.5104000000000001e-05, - "loss": 1.6928, - "step": 20020 - }, - { - "epoch": 0.64096, - "grad_norm": 43.41942596435547, - "learning_rate": 1.5100444444444446e-05, - "loss": 1.6895, - "step": 20030 - }, - { - "epoch": 0.64128, - "grad_norm": 44.41860580444336, - "learning_rate": 1.509688888888889e-05, - "loss": 1.6505, - "step": 20040 - }, - { - "epoch": 0.6416, - "grad_norm": 46.77073287963867, - "learning_rate": 1.5093333333333335e-05, - "loss": 1.6877, - "step": 20050 - }, - { - "epoch": 0.64192, - "grad_norm": 43.96682357788086, - "learning_rate": 1.5089777777777778e-05, - "loss": 1.6668, - "step": 20060 - }, - { - "epoch": 0.64224, - "grad_norm": 41.405120849609375, - "learning_rate": 1.5086222222222224e-05, - "loss": 1.681, - "step": 20070 - }, - { - "epoch": 0.64256, - "grad_norm": 44.63652038574219, - "learning_rate": 1.5082666666666667e-05, - "loss": 1.7161, - "step": 20080 - }, - { - "epoch": 0.64288, - "grad_norm": 44.282630920410156, - "learning_rate": 1.5079111111111113e-05, - "loss": 1.6709, - "step": 20090 - }, - { - "epoch": 0.6432, - "grad_norm": 47.07522964477539, - "learning_rate": 1.5075555555555556e-05, - "loss": 1.6955, - "step": 20100 - }, - { - "epoch": 0.64352, - "grad_norm": 45.80629348754883, - "learning_rate": 1.5072000000000002e-05, - "loss": 1.6828, - "step": 20110 - }, - { - "epoch": 0.64384, - "grad_norm": 45.871124267578125, - "learning_rate": 1.5068444444444445e-05, - "loss": 1.6864, - "step": 20120 - }, - { - "epoch": 0.64416, - "grad_norm": 44.659149169921875, - "learning_rate": 1.5064888888888892e-05, - "loss": 1.6907, - "step": 20130 - }, - { - "epoch": 0.64448, - "grad_norm": 43.87977600097656, - "learning_rate": 1.5061333333333334e-05, - "loss": 1.6988, - "step": 20140 - }, - { - "epoch": 0.6448, - "grad_norm": 44.692874908447266, - "learning_rate": 1.505777777777778e-05, - "loss": 1.683, - "step": 20150 - }, - { - "epoch": 0.64512, - "grad_norm": 43.429168701171875, - "learning_rate": 1.5054222222222223e-05, - "loss": 1.6656, - "step": 20160 - }, - { - "epoch": 0.64544, - "grad_norm": 43.82966613769531, - "learning_rate": 1.505066666666667e-05, - "loss": 1.6797, - "step": 20170 - }, - { - "epoch": 0.64576, - "grad_norm": 42.469825744628906, - "learning_rate": 1.5047111111111113e-05, - "loss": 1.6828, - "step": 20180 - }, - { - "epoch": 0.64608, - "grad_norm": 45.71176528930664, - "learning_rate": 1.5043555555555555e-05, - "loss": 1.7081, - "step": 20190 - }, - { - "epoch": 0.6464, - "grad_norm": 43.655677795410156, - "learning_rate": 1.5040000000000002e-05, - "loss": 1.7044, - "step": 20200 - }, - { - "epoch": 0.64672, - "grad_norm": 45.45912170410156, - "learning_rate": 1.5036444444444445e-05, - "loss": 1.6965, - "step": 20210 - }, - { - "epoch": 0.64704, - "grad_norm": 43.07465362548828, - "learning_rate": 1.503288888888889e-05, - "loss": 1.6755, - "step": 20220 - }, - { - "epoch": 0.64736, - "grad_norm": 44.67756652832031, - "learning_rate": 1.5029333333333334e-05, - "loss": 1.7036, - "step": 20230 - }, - { - "epoch": 0.64768, - "grad_norm": 45.22612762451172, - "learning_rate": 1.502577777777778e-05, - "loss": 1.6593, - "step": 20240 - }, - { - "epoch": 0.648, - "grad_norm": 43.58930587768555, - "learning_rate": 1.5022222222222223e-05, - "loss": 1.6873, - "step": 20250 - }, - { - "epoch": 0.64832, - "grad_norm": 
42.889339447021484, - "learning_rate": 1.5018666666666667e-05, - "loss": 1.6751, - "step": 20260 - }, - { - "epoch": 0.64864, - "grad_norm": 43.393394470214844, - "learning_rate": 1.5015111111111112e-05, - "loss": 1.6846, - "step": 20270 - }, - { - "epoch": 0.64896, - "grad_norm": 47.57592010498047, - "learning_rate": 1.5011555555555556e-05, - "loss": 1.6632, - "step": 20280 - }, - { - "epoch": 0.64928, - "grad_norm": 43.86087417602539, - "learning_rate": 1.5008000000000001e-05, - "loss": 1.6643, - "step": 20290 - }, - { - "epoch": 0.6496, - "grad_norm": 43.808414459228516, - "learning_rate": 1.5004444444444446e-05, - "loss": 1.7019, - "step": 20300 - }, - { - "epoch": 0.64992, - "grad_norm": 44.32374572753906, - "learning_rate": 1.500088888888889e-05, - "loss": 1.6773, - "step": 20310 - }, - { - "epoch": 0.65024, - "grad_norm": 45.46870422363281, - "learning_rate": 1.4997333333333335e-05, - "loss": 1.6681, - "step": 20320 - }, - { - "epoch": 0.65056, - "grad_norm": 45.13983917236328, - "learning_rate": 1.4993777777777778e-05, - "loss": 1.6726, - "step": 20330 - }, - { - "epoch": 0.65088, - "grad_norm": 44.52128982543945, - "learning_rate": 1.4990222222222224e-05, - "loss": 1.6837, - "step": 20340 - }, - { - "epoch": 0.6512, - "grad_norm": 45.18207931518555, - "learning_rate": 1.4986666666666667e-05, - "loss": 1.6595, - "step": 20350 - }, - { - "epoch": 0.65152, - "grad_norm": 44.02824020385742, - "learning_rate": 1.4983111111111113e-05, - "loss": 1.6353, - "step": 20360 - }, - { - "epoch": 0.65184, - "grad_norm": 42.066959381103516, - "learning_rate": 1.4979555555555556e-05, - "loss": 1.6624, - "step": 20370 - }, - { - "epoch": 0.65216, - "grad_norm": 45.960731506347656, - "learning_rate": 1.4976000000000002e-05, - "loss": 1.649, - "step": 20380 - }, - { - "epoch": 0.65248, - "grad_norm": 43.551265716552734, - "learning_rate": 1.4972444444444445e-05, - "loss": 1.6984, - "step": 20390 - }, - { - "epoch": 0.6528, - "grad_norm": 44.72035217285156, - "learning_rate": 1.4968888888888891e-05, - "loss": 1.6866, - "step": 20400 - }, - { - "epoch": 0.65312, - "grad_norm": 44.827938079833984, - "learning_rate": 1.4965333333333334e-05, - "loss": 1.6996, - "step": 20410 - }, - { - "epoch": 0.65344, - "grad_norm": 43.63932800292969, - "learning_rate": 1.496177777777778e-05, - "loss": 1.6786, - "step": 20420 - }, - { - "epoch": 0.65376, - "grad_norm": 44.68216323852539, - "learning_rate": 1.4958222222222223e-05, - "loss": 1.6908, - "step": 20430 - }, - { - "epoch": 0.65408, - "grad_norm": 44.0313835144043, - "learning_rate": 1.495466666666667e-05, - "loss": 1.7021, - "step": 20440 - }, - { - "epoch": 0.6544, - "grad_norm": 44.26289367675781, - "learning_rate": 1.4951111111111112e-05, - "loss": 1.6965, - "step": 20450 - }, - { - "epoch": 0.65472, - "grad_norm": 46.12016296386719, - "learning_rate": 1.4947555555555557e-05, - "loss": 1.6913, - "step": 20460 - }, - { - "epoch": 0.65504, - "grad_norm": 43.7845458984375, - "learning_rate": 1.4944000000000001e-05, - "loss": 1.6764, - "step": 20470 - }, - { - "epoch": 0.65536, - "grad_norm": 43.606197357177734, - "learning_rate": 1.4940444444444446e-05, - "loss": 1.6903, - "step": 20480 - }, - { - "epoch": 0.65568, - "grad_norm": 44.065025329589844, - "learning_rate": 1.493688888888889e-05, - "loss": 1.6771, - "step": 20490 - }, - { - "epoch": 0.656, - "grad_norm": 44.80516052246094, - "learning_rate": 1.4933333333333335e-05, - "loss": 1.6638, - "step": 20500 - }, - { - "epoch": 0.65632, - "grad_norm": 45.39891052246094, - "learning_rate": 
1.4929777777777778e-05, - "loss": 1.6808, - "step": 20510 - }, - { - "epoch": 0.65664, - "grad_norm": 44.93928146362305, - "learning_rate": 1.4926222222222224e-05, - "loss": 1.6991, - "step": 20520 - }, - { - "epoch": 0.65696, - "grad_norm": 42.415870666503906, - "learning_rate": 1.4922666666666667e-05, - "loss": 1.6825, - "step": 20530 - }, - { - "epoch": 0.65728, - "grad_norm": 44.756160736083984, - "learning_rate": 1.4919111111111113e-05, - "loss": 1.6651, - "step": 20540 - }, - { - "epoch": 0.6576, - "grad_norm": 43.42251968383789, - "learning_rate": 1.4915555555555556e-05, - "loss": 1.6734, - "step": 20550 - }, - { - "epoch": 0.65792, - "grad_norm": 43.07693099975586, - "learning_rate": 1.4912000000000002e-05, - "loss": 1.673, - "step": 20560 - }, - { - "epoch": 0.65824, - "grad_norm": 45.04411697387695, - "learning_rate": 1.4908444444444445e-05, - "loss": 1.6723, - "step": 20570 - }, - { - "epoch": 0.65856, - "grad_norm": 44.08964920043945, - "learning_rate": 1.4904888888888891e-05, - "loss": 1.675, - "step": 20580 - }, - { - "epoch": 0.65888, - "grad_norm": 41.829559326171875, - "learning_rate": 1.4901333333333334e-05, - "loss": 1.6365, - "step": 20590 - }, - { - "epoch": 0.6592, - "grad_norm": 43.900211334228516, - "learning_rate": 1.489777777777778e-05, - "loss": 1.6787, - "step": 20600 - }, - { - "epoch": 0.65952, - "grad_norm": 47.46596908569336, - "learning_rate": 1.4894222222222223e-05, - "loss": 1.6487, - "step": 20610 - }, - { - "epoch": 0.65984, - "grad_norm": 43.5471076965332, - "learning_rate": 1.489066666666667e-05, - "loss": 1.7001, - "step": 20620 - }, - { - "epoch": 0.66016, - "grad_norm": 44.62773895263672, - "learning_rate": 1.4887111111111112e-05, - "loss": 1.7028, - "step": 20630 - }, - { - "epoch": 0.66048, - "grad_norm": 43.4677619934082, - "learning_rate": 1.4883555555555557e-05, - "loss": 1.6899, - "step": 20640 - }, - { - "epoch": 0.6608, - "grad_norm": 43.40306854248047, - "learning_rate": 1.4880000000000002e-05, - "loss": 1.6764, - "step": 20650 - }, - { - "epoch": 0.66112, - "grad_norm": 46.035621643066406, - "learning_rate": 1.4876444444444446e-05, - "loss": 1.676, - "step": 20660 - }, - { - "epoch": 0.66144, - "grad_norm": 45.9166145324707, - "learning_rate": 1.487288888888889e-05, - "loss": 1.6585, - "step": 20670 - }, - { - "epoch": 0.66176, - "grad_norm": 44.903480529785156, - "learning_rate": 1.4869333333333335e-05, - "loss": 1.6823, - "step": 20680 - }, - { - "epoch": 0.66208, - "grad_norm": 44.58999252319336, - "learning_rate": 1.486577777777778e-05, - "loss": 1.6352, - "step": 20690 - }, - { - "epoch": 0.6624, - "grad_norm": 44.45679473876953, - "learning_rate": 1.4862222222222223e-05, - "loss": 1.6689, - "step": 20700 - }, - { - "epoch": 0.66272, - "grad_norm": 46.027244567871094, - "learning_rate": 1.4858666666666667e-05, - "loss": 1.6583, - "step": 20710 - }, - { - "epoch": 0.66304, - "grad_norm": 46.073158264160156, - "learning_rate": 1.4855111111111112e-05, - "loss": 1.7041, - "step": 20720 - }, - { - "epoch": 0.66336, - "grad_norm": 46.57586669921875, - "learning_rate": 1.4851555555555556e-05, - "loss": 1.7034, - "step": 20730 - }, - { - "epoch": 0.66368, - "grad_norm": 42.948909759521484, - "learning_rate": 1.4848e-05, - "loss": 1.6693, - "step": 20740 - }, - { - "epoch": 0.664, - "grad_norm": 45.48905944824219, - "learning_rate": 1.4844444444444445e-05, - "loss": 1.7216, - "step": 20750 - }, - { - "epoch": 0.66432, - "grad_norm": 44.7623405456543, - "learning_rate": 1.484088888888889e-05, - "loss": 1.6741, - "step": 20760 - }, - { - 
"epoch": 0.66464, - "grad_norm": 45.26624298095703, - "learning_rate": 1.4837333333333334e-05, - "loss": 1.6803, - "step": 20770 - }, - { - "epoch": 0.66496, - "grad_norm": 43.05164337158203, - "learning_rate": 1.4833777777777777e-05, - "loss": 1.6731, - "step": 20780 - }, - { - "epoch": 0.66528, - "grad_norm": 43.42141342163086, - "learning_rate": 1.4830222222222224e-05, - "loss": 1.6429, - "step": 20790 - }, - { - "epoch": 0.6656, - "grad_norm": 43.655452728271484, - "learning_rate": 1.4826666666666666e-05, - "loss": 1.6912, - "step": 20800 - }, - { - "epoch": 0.66592, - "grad_norm": 44.76203536987305, - "learning_rate": 1.4823111111111113e-05, - "loss": 1.6713, - "step": 20810 - }, - { - "epoch": 0.66624, - "grad_norm": 44.26459503173828, - "learning_rate": 1.4819555555555556e-05, - "loss": 1.6916, - "step": 20820 - }, - { - "epoch": 0.66656, - "grad_norm": 42.33543014526367, - "learning_rate": 1.4816000000000002e-05, - "loss": 1.6932, - "step": 20830 - }, - { - "epoch": 0.66688, - "grad_norm": 43.858802795410156, - "learning_rate": 1.4812444444444445e-05, - "loss": 1.6687, - "step": 20840 - }, - { - "epoch": 0.6672, - "grad_norm": 45.832191467285156, - "learning_rate": 1.4808888888888891e-05, - "loss": 1.6923, - "step": 20850 - }, - { - "epoch": 0.66752, - "grad_norm": 44.79924774169922, - "learning_rate": 1.4805333333333334e-05, - "loss": 1.6613, - "step": 20860 - }, - { - "epoch": 0.66784, - "grad_norm": 45.21271514892578, - "learning_rate": 1.480177777777778e-05, - "loss": 1.6804, - "step": 20870 - }, - { - "epoch": 0.66816, - "grad_norm": 45.01856994628906, - "learning_rate": 1.4798222222222223e-05, - "loss": 1.6578, - "step": 20880 - }, - { - "epoch": 0.66848, - "grad_norm": 43.530738830566406, - "learning_rate": 1.4794666666666669e-05, - "loss": 1.6844, - "step": 20890 - }, - { - "epoch": 0.6688, - "grad_norm": 46.218666076660156, - "learning_rate": 1.4791111111111112e-05, - "loss": 1.6917, - "step": 20900 - }, - { - "epoch": 0.66912, - "grad_norm": 47.14932632446289, - "learning_rate": 1.4787555555555557e-05, - "loss": 1.6595, - "step": 20910 - }, - { - "epoch": 0.66944, - "grad_norm": 47.606266021728516, - "learning_rate": 1.4784000000000001e-05, - "loss": 1.6732, - "step": 20920 - }, - { - "epoch": 0.66976, - "grad_norm": 43.61834716796875, - "learning_rate": 1.4780444444444446e-05, - "loss": 1.668, - "step": 20930 - }, - { - "epoch": 0.67008, - "grad_norm": 43.1433219909668, - "learning_rate": 1.477688888888889e-05, - "loss": 1.6584, - "step": 20940 - }, - { - "epoch": 0.6704, - "grad_norm": 43.34695053100586, - "learning_rate": 1.4773333333333335e-05, - "loss": 1.6478, - "step": 20950 - }, - { - "epoch": 0.67072, - "grad_norm": 44.85986328125, - "learning_rate": 1.4769777777777778e-05, - "loss": 1.6849, - "step": 20960 - }, - { - "epoch": 0.67104, - "grad_norm": 42.863189697265625, - "learning_rate": 1.4766222222222224e-05, - "loss": 1.6944, - "step": 20970 - }, - { - "epoch": 0.67136, - "grad_norm": 43.975914001464844, - "learning_rate": 1.4762666666666667e-05, - "loss": 1.6611, - "step": 20980 - }, - { - "epoch": 0.67168, - "grad_norm": 44.24748992919922, - "learning_rate": 1.4759111111111113e-05, - "loss": 1.6517, - "step": 20990 - }, - { - "epoch": 0.672, - "grad_norm": 43.405853271484375, - "learning_rate": 1.4755555555555556e-05, - "loss": 1.6585, - "step": 21000 - }, - { - "epoch": 0.67232, - "grad_norm": 43.27943801879883, - "learning_rate": 1.4752000000000002e-05, - "loss": 1.6554, - "step": 21010 - }, - { - "epoch": 0.67264, - "grad_norm": 46.6523323059082, - 
"learning_rate": 1.4748444444444445e-05, - "loss": 1.6923, - "step": 21020 - }, - { - "epoch": 0.67296, - "grad_norm": 44.580291748046875, - "learning_rate": 1.4744888888888891e-05, - "loss": 1.6568, - "step": 21030 - }, - { - "epoch": 0.67328, - "grad_norm": 43.5629768371582, - "learning_rate": 1.4741333333333334e-05, - "loss": 1.6756, - "step": 21040 - }, - { - "epoch": 0.6736, - "grad_norm": 43.935951232910156, - "learning_rate": 1.473777777777778e-05, - "loss": 1.6426, - "step": 21050 - }, - { - "epoch": 0.67392, - "grad_norm": 43.343994140625, - "learning_rate": 1.4734222222222223e-05, - "loss": 1.6679, - "step": 21060 - }, - { - "epoch": 0.67424, - "grad_norm": 45.69078063964844, - "learning_rate": 1.473066666666667e-05, - "loss": 1.6671, - "step": 21070 - }, - { - "epoch": 0.67456, - "grad_norm": 43.63746643066406, - "learning_rate": 1.4727111111111112e-05, - "loss": 1.721, - "step": 21080 - }, - { - "epoch": 0.67488, - "grad_norm": 46.481971740722656, - "learning_rate": 1.4723555555555557e-05, - "loss": 1.6843, - "step": 21090 - }, - { - "epoch": 0.6752, - "grad_norm": 43.81696319580078, - "learning_rate": 1.4720000000000001e-05, - "loss": 1.6857, - "step": 21100 - }, - { - "epoch": 0.67552, - "grad_norm": 44.01736831665039, - "learning_rate": 1.4716444444444446e-05, - "loss": 1.6955, - "step": 21110 - }, - { - "epoch": 0.67584, - "grad_norm": 42.9880256652832, - "learning_rate": 1.471288888888889e-05, - "loss": 1.7147, - "step": 21120 - }, - { - "epoch": 0.67616, - "grad_norm": 42.299346923828125, - "learning_rate": 1.4709333333333335e-05, - "loss": 1.6807, - "step": 21130 - }, - { - "epoch": 0.67648, - "grad_norm": 43.38955307006836, - "learning_rate": 1.470577777777778e-05, - "loss": 1.6958, - "step": 21140 - }, - { - "epoch": 0.6768, - "grad_norm": 42.738651275634766, - "learning_rate": 1.4702222222222224e-05, - "loss": 1.6508, - "step": 21150 - }, - { - "epoch": 0.67712, - "grad_norm": 44.31309509277344, - "learning_rate": 1.4698666666666667e-05, - "loss": 1.6497, - "step": 21160 - }, - { - "epoch": 0.67744, - "grad_norm": 42.17631912231445, - "learning_rate": 1.4695111111111113e-05, - "loss": 1.7003, - "step": 21170 - }, - { - "epoch": 0.67776, - "grad_norm": 43.73914337158203, - "learning_rate": 1.4691555555555556e-05, - "loss": 1.6769, - "step": 21180 - }, - { - "epoch": 0.67808, - "grad_norm": 43.96467590332031, - "learning_rate": 1.4688000000000002e-05, - "loss": 1.6889, - "step": 21190 - }, - { - "epoch": 0.6784, - "grad_norm": 46.01679611206055, - "learning_rate": 1.4684444444444445e-05, - "loss": 1.6636, - "step": 21200 - }, - { - "epoch": 0.67872, - "grad_norm": 41.097747802734375, - "learning_rate": 1.4680888888888891e-05, - "loss": 1.681, - "step": 21210 - }, - { - "epoch": 0.67904, - "grad_norm": 44.2218017578125, - "learning_rate": 1.4677333333333334e-05, - "loss": 1.6958, - "step": 21220 - }, - { - "epoch": 0.67936, - "grad_norm": 52.44313049316406, - "learning_rate": 1.4673777777777777e-05, - "loss": 1.6334, - "step": 21230 - }, - { - "epoch": 0.67968, - "grad_norm": 42.90603256225586, - "learning_rate": 1.4670222222222223e-05, - "loss": 1.6467, - "step": 21240 - }, - { - "epoch": 0.68, - "grad_norm": 43.24466323852539, - "learning_rate": 1.4666666666666666e-05, - "loss": 1.6957, - "step": 21250 - }, - { - "epoch": 0.68032, - "grad_norm": 44.07612609863281, - "learning_rate": 1.4663111111111113e-05, - "loss": 1.6713, - "step": 21260 - }, - { - "epoch": 0.68064, - "grad_norm": 44.573612213134766, - "learning_rate": 1.4659555555555555e-05, - "loss": 1.6881, - 
"step": 21270 - }, - { - "epoch": 0.68096, - "grad_norm": 44.9393424987793, - "learning_rate": 1.4656000000000002e-05, - "loss": 1.7043, - "step": 21280 - }, - { - "epoch": 0.68128, - "grad_norm": 41.96256637573242, - "learning_rate": 1.4652444444444445e-05, - "loss": 1.7014, - "step": 21290 - }, - { - "epoch": 0.6816, - "grad_norm": 42.43061065673828, - "learning_rate": 1.464888888888889e-05, - "loss": 1.66, - "step": 21300 - }, - { - "epoch": 0.68192, - "grad_norm": 43.678466796875, - "learning_rate": 1.4645333333333334e-05, - "loss": 1.6978, - "step": 21310 - }, - { - "epoch": 0.68224, - "grad_norm": 44.234283447265625, - "learning_rate": 1.464177777777778e-05, - "loss": 1.6687, - "step": 21320 - }, - { - "epoch": 0.68256, - "grad_norm": 45.48586654663086, - "learning_rate": 1.4638222222222223e-05, - "loss": 1.637, - "step": 21330 - }, - { - "epoch": 0.68288, - "grad_norm": 45.24445724487305, - "learning_rate": 1.4634666666666669e-05, - "loss": 1.685, - "step": 21340 - }, - { - "epoch": 0.6832, - "grad_norm": 44.61650085449219, - "learning_rate": 1.4631111111111112e-05, - "loss": 1.6878, - "step": 21350 - }, - { - "epoch": 0.68352, - "grad_norm": 45.00833511352539, - "learning_rate": 1.4627555555555556e-05, - "loss": 1.6808, - "step": 21360 - }, - { - "epoch": 0.68384, - "grad_norm": 42.275150299072266, - "learning_rate": 1.4624000000000001e-05, - "loss": 1.6747, - "step": 21370 - }, - { - "epoch": 0.68416, - "grad_norm": 42.742698669433594, - "learning_rate": 1.4620444444444445e-05, - "loss": 1.6608, - "step": 21380 - }, - { - "epoch": 0.68448, - "grad_norm": 44.61221694946289, - "learning_rate": 1.461688888888889e-05, - "loss": 1.6709, - "step": 21390 - }, - { - "epoch": 0.6848, - "grad_norm": 45.47736740112305, - "learning_rate": 1.4613333333333335e-05, - "loss": 1.675, - "step": 21400 - }, - { - "epoch": 0.68512, - "grad_norm": 47.367313385009766, - "learning_rate": 1.4609777777777777e-05, - "loss": 1.6819, - "step": 21410 - }, - { - "epoch": 0.68544, - "grad_norm": 42.664772033691406, - "learning_rate": 1.4606222222222224e-05, - "loss": 1.6893, - "step": 21420 - }, - { - "epoch": 0.68576, - "grad_norm": 45.12783432006836, - "learning_rate": 1.4602666666666667e-05, - "loss": 1.7108, - "step": 21430 - }, - { - "epoch": 0.68608, - "grad_norm": 42.89619827270508, - "learning_rate": 1.4599111111111113e-05, - "loss": 1.6802, - "step": 21440 - }, - { - "epoch": 0.6864, - "grad_norm": 45.1823844909668, - "learning_rate": 1.4595555555555556e-05, - "loss": 1.6557, - "step": 21450 - }, - { - "epoch": 0.68672, - "grad_norm": 46.50447463989258, - "learning_rate": 1.4592000000000002e-05, - "loss": 1.6734, - "step": 21460 - }, - { - "epoch": 0.68704, - "grad_norm": 43.853580474853516, - "learning_rate": 1.4588444444444445e-05, - "loss": 1.6702, - "step": 21470 - }, - { - "epoch": 0.68736, - "grad_norm": 43.31742477416992, - "learning_rate": 1.4584888888888891e-05, - "loss": 1.6708, - "step": 21480 - }, - { - "epoch": 0.68768, - "grad_norm": 47.49016571044922, - "learning_rate": 1.4581333333333334e-05, - "loss": 1.6976, - "step": 21490 - }, - { - "epoch": 0.688, - "grad_norm": 42.12497329711914, - "learning_rate": 1.457777777777778e-05, - "loss": 1.6694, - "step": 21500 - }, - { - "epoch": 0.68832, - "grad_norm": 45.087188720703125, - "learning_rate": 1.4574222222222223e-05, - "loss": 1.7175, - "step": 21510 - }, - { - "epoch": 0.68864, - "grad_norm": 45.94072341918945, - "learning_rate": 1.457066666666667e-05, - "loss": 1.6674, - "step": 21520 - }, - { - "epoch": 0.68896, - "grad_norm": 
45.907684326171875, - "learning_rate": 1.4567111111111112e-05, - "loss": 1.6998, - "step": 21530 - }, - { - "epoch": 0.68928, - "grad_norm": 43.56646728515625, - "learning_rate": 1.4563555555555557e-05, - "loss": 1.6627, - "step": 21540 - }, - { - "epoch": 0.6896, - "grad_norm": 44.42494201660156, - "learning_rate": 1.4560000000000001e-05, - "loss": 1.6713, - "step": 21550 - }, - { - "epoch": 0.68992, - "grad_norm": 43.25820541381836, - "learning_rate": 1.4556444444444446e-05, - "loss": 1.6823, - "step": 21560 - }, - { - "epoch": 0.69024, - "grad_norm": 44.242862701416016, - "learning_rate": 1.455288888888889e-05, - "loss": 1.6743, - "step": 21570 - }, - { - "epoch": 0.69056, - "grad_norm": 43.78083801269531, - "learning_rate": 1.4549333333333335e-05, - "loss": 1.6951, - "step": 21580 - }, - { - "epoch": 0.69088, - "grad_norm": 44.01467514038086, - "learning_rate": 1.454577777777778e-05, - "loss": 1.6622, - "step": 21590 - }, - { - "epoch": 0.6912, - "grad_norm": 44.382904052734375, - "learning_rate": 1.4542222222222224e-05, - "loss": 1.7014, - "step": 21600 - }, - { - "epoch": 0.69152, - "grad_norm": 43.727657318115234, - "learning_rate": 1.4538666666666667e-05, - "loss": 1.6609, - "step": 21610 - }, - { - "epoch": 0.69184, - "grad_norm": 44.795719146728516, - "learning_rate": 1.4535111111111113e-05, - "loss": 1.6852, - "step": 21620 - }, - { - "epoch": 0.69216, - "grad_norm": 45.50511932373047, - "learning_rate": 1.4531555555555556e-05, - "loss": 1.7151, - "step": 21630 - }, - { - "epoch": 0.69248, - "grad_norm": 44.66515350341797, - "learning_rate": 1.4528000000000002e-05, - "loss": 1.6791, - "step": 21640 - }, - { - "epoch": 0.6928, - "grad_norm": 43.8026008605957, - "learning_rate": 1.4524444444444445e-05, - "loss": 1.6504, - "step": 21650 - }, - { - "epoch": 0.69312, - "grad_norm": 44.98323440551758, - "learning_rate": 1.4520888888888891e-05, - "loss": 1.6957, - "step": 21660 - }, - { - "epoch": 0.69344, - "grad_norm": 44.08596420288086, - "learning_rate": 1.4517333333333334e-05, - "loss": 1.6781, - "step": 21670 - }, - { - "epoch": 0.69376, - "grad_norm": 43.75856018066406, - "learning_rate": 1.451377777777778e-05, - "loss": 1.6984, - "step": 21680 - }, - { - "epoch": 0.69408, - "grad_norm": 42.918182373046875, - "learning_rate": 1.4510222222222223e-05, - "loss": 1.6458, - "step": 21690 - }, - { - "epoch": 0.6944, - "grad_norm": 43.657752990722656, - "learning_rate": 1.450666666666667e-05, - "loss": 1.6763, - "step": 21700 - }, - { - "epoch": 0.69472, - "grad_norm": 44.31597900390625, - "learning_rate": 1.4503111111111112e-05, - "loss": 1.6781, - "step": 21710 - }, - { - "epoch": 0.69504, - "grad_norm": 45.91122817993164, - "learning_rate": 1.4499555555555559e-05, - "loss": 1.6734, - "step": 21720 - }, - { - "epoch": 0.69536, - "grad_norm": 43.836753845214844, - "learning_rate": 1.4496000000000001e-05, - "loss": 1.6747, - "step": 21730 - }, - { - "epoch": 0.69568, - "grad_norm": 44.824256896972656, - "learning_rate": 1.4492444444444444e-05, - "loss": 1.6594, - "step": 21740 - }, - { - "epoch": 0.696, - "grad_norm": 45.5569953918457, - "learning_rate": 1.448888888888889e-05, - "loss": 1.6569, - "step": 21750 - }, - { - "epoch": 0.69632, - "grad_norm": 42.6880989074707, - "learning_rate": 1.4485333333333333e-05, - "loss": 1.7061, - "step": 21760 - }, - { - "epoch": 0.69664, - "grad_norm": 45.63669967651367, - "learning_rate": 1.448177777777778e-05, - "loss": 1.6642, - "step": 21770 - }, - { - "epoch": 0.69696, - "grad_norm": 46.666629791259766, - "learning_rate": 
1.4478222222222223e-05, - "loss": 1.661, - "step": 21780 - }, - { - "epoch": 0.69728, - "grad_norm": 43.4208984375, - "learning_rate": 1.4474666666666669e-05, - "loss": 1.6883, - "step": 21790 - }, - { - "epoch": 0.6976, - "grad_norm": 43.890933990478516, - "learning_rate": 1.4471111111111112e-05, - "loss": 1.6596, - "step": 21800 - }, - { - "epoch": 0.69792, - "grad_norm": 42.00779724121094, - "learning_rate": 1.4467555555555556e-05, - "loss": 1.6665, - "step": 21810 - }, - { - "epoch": 0.69824, - "grad_norm": 41.96549987792969, - "learning_rate": 1.4464e-05, - "loss": 1.6505, - "step": 21820 - }, - { - "epoch": 0.69856, - "grad_norm": 43.60977554321289, - "learning_rate": 1.4460444444444445e-05, - "loss": 1.6747, - "step": 21830 - }, - { - "epoch": 0.69888, - "grad_norm": 43.725929260253906, - "learning_rate": 1.445688888888889e-05, - "loss": 1.6546, - "step": 21840 - }, - { - "epoch": 0.6992, - "grad_norm": 45.897518157958984, - "learning_rate": 1.4453333333333334e-05, - "loss": 1.669, - "step": 21850 - }, - { - "epoch": 0.69952, - "grad_norm": 42.812721252441406, - "learning_rate": 1.4449777777777777e-05, - "loss": 1.6681, - "step": 21860 - }, - { - "epoch": 0.69984, - "grad_norm": 42.7755126953125, - "learning_rate": 1.4446222222222224e-05, - "loss": 1.6507, - "step": 21870 - }, - { - "epoch": 0.70016, - "grad_norm": 43.823123931884766, - "learning_rate": 1.4442666666666666e-05, - "loss": 1.6878, - "step": 21880 - }, - { - "epoch": 0.70048, - "grad_norm": 46.74770736694336, - "learning_rate": 1.4439111111111113e-05, - "loss": 1.6812, - "step": 21890 - }, - { - "epoch": 0.7008, - "grad_norm": 45.07465362548828, - "learning_rate": 1.4435555555555556e-05, - "loss": 1.6529, - "step": 21900 - }, - { - "epoch": 0.70112, - "grad_norm": 45.174041748046875, - "learning_rate": 1.4432000000000002e-05, - "loss": 1.6818, - "step": 21910 - }, - { - "epoch": 0.70144, - "grad_norm": 43.29008483886719, - "learning_rate": 1.4428444444444445e-05, - "loss": 1.6575, - "step": 21920 - }, - { - "epoch": 0.70176, - "grad_norm": 44.14530563354492, - "learning_rate": 1.4424888888888891e-05, - "loss": 1.6732, - "step": 21930 - }, - { - "epoch": 0.70208, - "grad_norm": 44.139373779296875, - "learning_rate": 1.4421333333333334e-05, - "loss": 1.6953, - "step": 21940 - }, - { - "epoch": 0.7024, - "grad_norm": 42.85390090942383, - "learning_rate": 1.441777777777778e-05, - "loss": 1.6489, - "step": 21950 - }, - { - "epoch": 0.70272, - "grad_norm": 44.265907287597656, - "learning_rate": 1.4414222222222223e-05, - "loss": 1.683, - "step": 21960 - }, - { - "epoch": 0.70304, - "grad_norm": 43.23847961425781, - "learning_rate": 1.4410666666666669e-05, - "loss": 1.6433, - "step": 21970 - }, - { - "epoch": 0.70336, - "grad_norm": 44.420169830322266, - "learning_rate": 1.4407111111111112e-05, - "loss": 1.6664, - "step": 21980 - }, - { - "epoch": 0.70368, - "grad_norm": 43.698692321777344, - "learning_rate": 1.4403555555555556e-05, - "loss": 1.6422, - "step": 21990 - }, - { - "epoch": 0.704, - "grad_norm": 43.02571105957031, - "learning_rate": 1.4400000000000001e-05, - "loss": 1.6612, - "step": 22000 - }, - { - "epoch": 0.70432, - "grad_norm": 43.11848068237305, - "learning_rate": 1.4396444444444446e-05, - "loss": 1.6877, - "step": 22010 - }, - { - "epoch": 0.70464, - "grad_norm": 44.298397064208984, - "learning_rate": 1.439288888888889e-05, - "loss": 1.6554, - "step": 22020 - }, - { - "epoch": 0.70496, - "grad_norm": 43.6672248840332, - "learning_rate": 1.4389333333333335e-05, - "loss": 1.6756, - "step": 22030 - }, - { - 
"epoch": 0.70528, - "grad_norm": 45.8585205078125, - "learning_rate": 1.438577777777778e-05, - "loss": 1.6875, - "step": 22040 - }, - { - "epoch": 0.7056, - "grad_norm": 44.58899688720703, - "learning_rate": 1.4382222222222224e-05, - "loss": 1.65, - "step": 22050 - }, - { - "epoch": 0.70592, - "grad_norm": 43.89964294433594, - "learning_rate": 1.4378666666666667e-05, - "loss": 1.6596, - "step": 22060 - }, - { - "epoch": 0.70624, - "grad_norm": 43.34562683105469, - "learning_rate": 1.4375111111111113e-05, - "loss": 1.6597, - "step": 22070 - }, - { - "epoch": 0.70656, - "grad_norm": 42.41712188720703, - "learning_rate": 1.4371555555555556e-05, - "loss": 1.6537, - "step": 22080 - }, - { - "epoch": 0.70688, - "grad_norm": 44.702484130859375, - "learning_rate": 1.4368000000000002e-05, - "loss": 1.6462, - "step": 22090 - }, - { - "epoch": 0.7072, - "grad_norm": 44.63224792480469, - "learning_rate": 1.4364444444444445e-05, - "loss": 1.669, - "step": 22100 - }, - { - "epoch": 0.70752, - "grad_norm": 44.96320724487305, - "learning_rate": 1.4360888888888891e-05, - "loss": 1.6802, - "step": 22110 - }, - { - "epoch": 0.70784, - "grad_norm": 45.22520065307617, - "learning_rate": 1.4357333333333334e-05, - "loss": 1.668, - "step": 22120 - }, - { - "epoch": 0.70816, - "grad_norm": 43.496524810791016, - "learning_rate": 1.435377777777778e-05, - "loss": 1.6822, - "step": 22130 - }, - { - "epoch": 0.70848, - "grad_norm": 45.04349136352539, - "learning_rate": 1.4350222222222223e-05, - "loss": 1.6227, - "step": 22140 - }, - { - "epoch": 0.7088, - "grad_norm": 43.959861755371094, - "learning_rate": 1.434666666666667e-05, - "loss": 1.6815, - "step": 22150 - }, - { - "epoch": 0.70912, - "grad_norm": 44.3282470703125, - "learning_rate": 1.4343111111111112e-05, - "loss": 1.6754, - "step": 22160 - }, - { - "epoch": 0.70944, - "grad_norm": 42.763973236083984, - "learning_rate": 1.4339555555555558e-05, - "loss": 1.6731, - "step": 22170 - }, - { - "epoch": 0.70976, - "grad_norm": 44.765254974365234, - "learning_rate": 1.4336000000000001e-05, - "loss": 1.6602, - "step": 22180 - }, - { - "epoch": 0.71008, - "grad_norm": 43.3527946472168, - "learning_rate": 1.4332444444444446e-05, - "loss": 1.665, - "step": 22190 - }, - { - "epoch": 0.7104, - "grad_norm": 43.666568756103516, - "learning_rate": 1.432888888888889e-05, - "loss": 1.6816, - "step": 22200 - }, - { - "epoch": 0.71072, - "grad_norm": 46.126827239990234, - "learning_rate": 1.4325333333333335e-05, - "loss": 1.6815, - "step": 22210 - }, - { - "epoch": 0.71104, - "grad_norm": 44.10695266723633, - "learning_rate": 1.432177777777778e-05, - "loss": 1.6675, - "step": 22220 - }, - { - "epoch": 0.71136, - "grad_norm": 45.392333984375, - "learning_rate": 1.4318222222222224e-05, - "loss": 1.6706, - "step": 22230 - }, - { - "epoch": 0.71168, - "grad_norm": 45.036781311035156, - "learning_rate": 1.4314666666666669e-05, - "loss": 1.6644, - "step": 22240 - }, - { - "epoch": 0.712, - "grad_norm": 44.691246032714844, - "learning_rate": 1.4311111111111111e-05, - "loss": 1.6886, - "step": 22250 - }, - { - "epoch": 0.71232, - "grad_norm": 44.20449447631836, - "learning_rate": 1.4307555555555556e-05, - "loss": 1.6956, - "step": 22260 - }, - { - "epoch": 0.71264, - "grad_norm": 43.10037612915039, - "learning_rate": 1.4304e-05, - "loss": 1.6857, - "step": 22270 - }, - { - "epoch": 0.71296, - "grad_norm": 43.95088577270508, - "learning_rate": 1.4300444444444445e-05, - "loss": 1.6607, - "step": 22280 - }, - { - "epoch": 0.71328, - "grad_norm": 44.31607437133789, - "learning_rate": 
1.429688888888889e-05, - "loss": 1.6689, - "step": 22290 - }, - { - "epoch": 0.7136, - "grad_norm": 41.93162536621094, - "learning_rate": 1.4293333333333334e-05, - "loss": 1.6705, - "step": 22300 - }, - { - "epoch": 0.71392, - "grad_norm": 45.12978744506836, - "learning_rate": 1.4289777777777777e-05, - "loss": 1.6745, - "step": 22310 - }, - { - "epoch": 0.71424, - "grad_norm": 46.437583923339844, - "learning_rate": 1.4286222222222223e-05, - "loss": 1.6456, - "step": 22320 - }, - { - "epoch": 0.71456, - "grad_norm": 42.88267517089844, - "learning_rate": 1.4282666666666666e-05, - "loss": 1.6257, - "step": 22330 - }, - { - "epoch": 0.71488, - "grad_norm": 46.049652099609375, - "learning_rate": 1.4279111111111112e-05, - "loss": 1.6578, - "step": 22340 - }, - { - "epoch": 0.7152, - "grad_norm": 45.01377868652344, - "learning_rate": 1.4275555555555555e-05, - "loss": 1.678, - "step": 22350 - }, - { - "epoch": 0.71552, - "grad_norm": 44.42237091064453, - "learning_rate": 1.4272000000000002e-05, - "loss": 1.6648, - "step": 22360 - }, - { - "epoch": 0.71584, - "grad_norm": 43.99474334716797, - "learning_rate": 1.4268444444444444e-05, - "loss": 1.6889, - "step": 22370 - }, - { - "epoch": 0.71616, - "grad_norm": 41.76171875, - "learning_rate": 1.426488888888889e-05, - "loss": 1.6735, - "step": 22380 - }, - { - "epoch": 0.71648, - "grad_norm": 43.52986526489258, - "learning_rate": 1.4261333333333334e-05, - "loss": 1.6696, - "step": 22390 - }, - { - "epoch": 0.7168, - "grad_norm": 43.7346076965332, - "learning_rate": 1.425777777777778e-05, - "loss": 1.6738, - "step": 22400 - }, - { - "epoch": 0.71712, - "grad_norm": 46.53422546386719, - "learning_rate": 1.4254222222222223e-05, - "loss": 1.6429, - "step": 22410 - }, - { - "epoch": 0.71744, - "grad_norm": 43.2796516418457, - "learning_rate": 1.4250666666666669e-05, - "loss": 1.661, - "step": 22420 - }, - { - "epoch": 0.71776, - "grad_norm": 45.7249641418457, - "learning_rate": 1.4247111111111112e-05, - "loss": 1.6454, - "step": 22430 - }, - { - "epoch": 0.71808, - "grad_norm": 44.16270446777344, - "learning_rate": 1.4243555555555556e-05, - "loss": 1.654, - "step": 22440 - }, - { - "epoch": 0.7184, - "grad_norm": 46.57696533203125, - "learning_rate": 1.4240000000000001e-05, - "loss": 1.6901, - "step": 22450 - }, - { - "epoch": 0.71872, - "grad_norm": 43.533870697021484, - "learning_rate": 1.4236444444444445e-05, - "loss": 1.6344, - "step": 22460 - }, - { - "epoch": 0.71904, - "grad_norm": 43.986446380615234, - "learning_rate": 1.423288888888889e-05, - "loss": 1.6738, - "step": 22470 - }, - { - "epoch": 0.71936, - "grad_norm": 43.613040924072266, - "learning_rate": 1.4229333333333335e-05, - "loss": 1.6628, - "step": 22480 - }, - { - "epoch": 0.71968, - "grad_norm": 43.533058166503906, - "learning_rate": 1.4225777777777779e-05, - "loss": 1.6739, - "step": 22490 - }, - { - "epoch": 0.72, - "grad_norm": 46.17250442504883, - "learning_rate": 1.4222222222222224e-05, - "loss": 1.6625, - "step": 22500 - }, - { - "epoch": 0.72032, - "grad_norm": 44.67798614501953, - "learning_rate": 1.4218666666666667e-05, - "loss": 1.6553, - "step": 22510 - }, - { - "epoch": 0.72064, - "grad_norm": 43.80499267578125, - "learning_rate": 1.4215111111111113e-05, - "loss": 1.6733, - "step": 22520 - }, - { - "epoch": 0.72096, - "grad_norm": 45.787498474121094, - "learning_rate": 1.4211555555555556e-05, - "loss": 1.6607, - "step": 22530 - }, - { - "epoch": 0.72128, - "grad_norm": 43.61078643798828, - "learning_rate": 1.4208000000000002e-05, - "loss": 1.6785, - "step": 22540 - }, - { - 
"epoch": 0.7216, - "grad_norm": 44.815433502197266, - "learning_rate": 1.4204444444444445e-05, - "loss": 1.6646, - "step": 22550 - }, - { - "epoch": 0.72192, - "grad_norm": 44.859615325927734, - "learning_rate": 1.4200888888888891e-05, - "loss": 1.6768, - "step": 22560 - }, - { - "epoch": 0.72224, - "grad_norm": 47.96544647216797, - "learning_rate": 1.4197333333333334e-05, - "loss": 1.6902, - "step": 22570 - }, - { - "epoch": 0.72256, - "grad_norm": 44.82842254638672, - "learning_rate": 1.419377777777778e-05, - "loss": 1.6897, - "step": 22580 - }, - { - "epoch": 0.72288, - "grad_norm": 45.776710510253906, - "learning_rate": 1.4190222222222223e-05, - "loss": 1.6566, - "step": 22590 - }, - { - "epoch": 0.7232, - "grad_norm": 42.437870025634766, - "learning_rate": 1.418666666666667e-05, - "loss": 1.6823, - "step": 22600 - }, - { - "epoch": 0.72352, - "grad_norm": 43.7807731628418, - "learning_rate": 1.4183111111111112e-05, - "loss": 1.6802, - "step": 22610 - }, - { - "epoch": 0.72384, - "grad_norm": 43.51747512817383, - "learning_rate": 1.4179555555555558e-05, - "loss": 1.6449, - "step": 22620 - }, - { - "epoch": 0.72416, - "grad_norm": 42.205467224121094, - "learning_rate": 1.4176000000000001e-05, - "loss": 1.6829, - "step": 22630 - }, - { - "epoch": 0.72448, - "grad_norm": 43.446014404296875, - "learning_rate": 1.4172444444444446e-05, - "loss": 1.6902, - "step": 22640 - }, - { - "epoch": 0.7248, - "grad_norm": 43.7180061340332, - "learning_rate": 1.416888888888889e-05, - "loss": 1.6697, - "step": 22650 - }, - { - "epoch": 0.72512, - "grad_norm": 44.47442626953125, - "learning_rate": 1.4165333333333335e-05, - "loss": 1.6647, - "step": 22660 - }, - { - "epoch": 0.72544, - "grad_norm": 46.07502746582031, - "learning_rate": 1.416177777777778e-05, - "loss": 1.6755, - "step": 22670 - }, - { - "epoch": 0.72576, - "grad_norm": 44.2828369140625, - "learning_rate": 1.4158222222222224e-05, - "loss": 1.6874, - "step": 22680 - }, - { - "epoch": 0.72608, - "grad_norm": 42.204612731933594, - "learning_rate": 1.4154666666666668e-05, - "loss": 1.6864, - "step": 22690 - }, - { - "epoch": 0.7264, - "grad_norm": 42.07124710083008, - "learning_rate": 1.4151111111111113e-05, - "loss": 1.6453, - "step": 22700 - }, - { - "epoch": 0.72672, - "grad_norm": 49.05067825317383, - "learning_rate": 1.4147555555555556e-05, - "loss": 1.6727, - "step": 22710 - }, - { - "epoch": 0.72704, - "grad_norm": 45.1103401184082, - "learning_rate": 1.4144000000000002e-05, - "loss": 1.6688, - "step": 22720 - }, - { - "epoch": 0.72736, - "grad_norm": 44.48417282104492, - "learning_rate": 1.4140444444444445e-05, - "loss": 1.6702, - "step": 22730 - }, - { - "epoch": 0.72768, - "grad_norm": 45.34505844116211, - "learning_rate": 1.4136888888888891e-05, - "loss": 1.6479, - "step": 22740 - }, - { - "epoch": 0.728, - "grad_norm": 46.812767028808594, - "learning_rate": 1.4133333333333334e-05, - "loss": 1.6598, - "step": 22750 - }, - { - "epoch": 0.72832, - "grad_norm": 46.07847595214844, - "learning_rate": 1.412977777777778e-05, - "loss": 1.6663, - "step": 22760 - }, - { - "epoch": 0.72864, - "grad_norm": 47.190879821777344, - "learning_rate": 1.4126222222222223e-05, - "loss": 1.6324, - "step": 22770 - }, - { - "epoch": 0.72896, - "grad_norm": 43.76619338989258, - "learning_rate": 1.4122666666666666e-05, - "loss": 1.6851, - "step": 22780 - }, - { - "epoch": 0.72928, - "grad_norm": 45.95934295654297, - "learning_rate": 1.4119111111111112e-05, - "loss": 1.6608, - "step": 22790 - }, - { - "epoch": 0.7296, - "grad_norm": 43.19196701049805, - 
"learning_rate": 1.4115555555555555e-05, - "loss": 1.6564, - "step": 22800 - }, - { - "epoch": 0.72992, - "grad_norm": 43.22726821899414, - "learning_rate": 1.4112000000000001e-05, - "loss": 1.6412, - "step": 22810 - }, - { - "epoch": 0.73024, - "grad_norm": 45.99415588378906, - "learning_rate": 1.4108444444444444e-05, - "loss": 1.6814, - "step": 22820 - }, - { - "epoch": 0.73056, - "grad_norm": 43.976158142089844, - "learning_rate": 1.410488888888889e-05, - "loss": 1.6803, - "step": 22830 - }, - { - "epoch": 0.73088, - "grad_norm": 44.73454284667969, - "learning_rate": 1.4101333333333333e-05, - "loss": 1.669, - "step": 22840 - }, - { - "epoch": 0.7312, - "grad_norm": 42.69565963745117, - "learning_rate": 1.409777777777778e-05, - "loss": 1.667, - "step": 22850 - }, - { - "epoch": 0.73152, - "grad_norm": 43.98191452026367, - "learning_rate": 1.4094222222222223e-05, - "loss": 1.6988, - "step": 22860 - }, - { - "epoch": 0.73184, - "grad_norm": 44.971351623535156, - "learning_rate": 1.4090666666666669e-05, - "loss": 1.6534, - "step": 22870 - }, - { - "epoch": 0.73216, - "grad_norm": 45.63749694824219, - "learning_rate": 1.4087111111111112e-05, - "loss": 1.651, - "step": 22880 - }, - { - "epoch": 0.73248, - "grad_norm": 44.00672912597656, - "learning_rate": 1.4083555555555556e-05, - "loss": 1.6516, - "step": 22890 - }, - { - "epoch": 0.7328, - "grad_norm": 44.87765884399414, - "learning_rate": 1.408e-05, - "loss": 1.6654, - "step": 22900 - }, - { - "epoch": 0.73312, - "grad_norm": 43.8448371887207, - "learning_rate": 1.4076444444444445e-05, - "loss": 1.6614, - "step": 22910 - }, - { - "epoch": 0.73344, - "grad_norm": 45.86703872680664, - "learning_rate": 1.407288888888889e-05, - "loss": 1.6794, - "step": 22920 - }, - { - "epoch": 0.73376, - "grad_norm": 45.04334259033203, - "learning_rate": 1.4069333333333334e-05, - "loss": 1.6469, - "step": 22930 - }, - { - "epoch": 0.73408, - "grad_norm": 43.51724624633789, - "learning_rate": 1.4065777777777779e-05, - "loss": 1.6847, - "step": 22940 - }, - { - "epoch": 0.7344, - "grad_norm": 45.09579086303711, - "learning_rate": 1.4062222222222223e-05, - "loss": 1.6817, - "step": 22950 - }, - { - "epoch": 0.73472, - "grad_norm": 45.397491455078125, - "learning_rate": 1.4058666666666666e-05, - "loss": 1.684, - "step": 22960 - }, - { - "epoch": 0.73504, - "grad_norm": 43.79633712768555, - "learning_rate": 1.4055111111111113e-05, - "loss": 1.6278, - "step": 22970 - }, - { - "epoch": 0.73536, - "grad_norm": 43.882774353027344, - "learning_rate": 1.4051555555555555e-05, - "loss": 1.6492, - "step": 22980 - }, - { - "epoch": 0.73568, - "grad_norm": 44.50541687011719, - "learning_rate": 1.4048000000000002e-05, - "loss": 1.659, - "step": 22990 - }, - { - "epoch": 0.736, - "grad_norm": 45.097389221191406, - "learning_rate": 1.4044444444444445e-05, - "loss": 1.6857, - "step": 23000 - }, - { - "epoch": 0.73632, - "grad_norm": 45.81336212158203, - "learning_rate": 1.404088888888889e-05, - "loss": 1.677, - "step": 23010 - }, - { - "epoch": 0.73664, - "grad_norm": 44.712608337402344, - "learning_rate": 1.4037333333333334e-05, - "loss": 1.6642, - "step": 23020 - }, - { - "epoch": 0.73696, - "grad_norm": 44.72639465332031, - "learning_rate": 1.403377777777778e-05, - "loss": 1.6722, - "step": 23030 - }, - { - "epoch": 0.73728, - "grad_norm": 43.99245071411133, - "learning_rate": 1.4030222222222223e-05, - "loss": 1.6658, - "step": 23040 - }, - { - "epoch": 0.7376, - "grad_norm": 43.95591354370117, - "learning_rate": 1.4026666666666669e-05, - "loss": 1.6606, - "step": 23050 - }, 
- { - "epoch": 0.73792, - "grad_norm": 46.36194610595703, - "learning_rate": 1.4023111111111112e-05, - "loss": 1.6521, - "step": 23060 - }, - { - "epoch": 0.73824, - "grad_norm": 44.344261169433594, - "learning_rate": 1.4019555555555558e-05, - "loss": 1.6683, - "step": 23070 - }, - { - "epoch": 0.73856, - "grad_norm": 44.6585807800293, - "learning_rate": 1.4016000000000001e-05, - "loss": 1.6445, - "step": 23080 - }, - { - "epoch": 0.73888, - "grad_norm": 45.141300201416016, - "learning_rate": 1.4012444444444446e-05, - "loss": 1.6738, - "step": 23090 - }, - { - "epoch": 0.7392, - "grad_norm": 44.751033782958984, - "learning_rate": 1.400888888888889e-05, - "loss": 1.6745, - "step": 23100 - }, - { - "epoch": 0.73952, - "grad_norm": 45.1855354309082, - "learning_rate": 1.4005333333333335e-05, - "loss": 1.6641, - "step": 23110 - }, - { - "epoch": 0.73984, - "grad_norm": 42.032108306884766, - "learning_rate": 1.400177777777778e-05, - "loss": 1.6633, - "step": 23120 - }, - { - "epoch": 0.74016, - "grad_norm": 43.98733139038086, - "learning_rate": 1.3998222222222224e-05, - "loss": 1.6436, - "step": 23130 - }, - { - "epoch": 0.74048, - "grad_norm": 44.74616622924805, - "learning_rate": 1.3994666666666668e-05, - "loss": 1.6801, - "step": 23140 - }, - { - "epoch": 0.7408, - "grad_norm": 42.1522102355957, - "learning_rate": 1.3991111111111113e-05, - "loss": 1.646, - "step": 23150 - }, - { - "epoch": 0.74112, - "grad_norm": 43.740299224853516, - "learning_rate": 1.3987555555555556e-05, - "loss": 1.6409, - "step": 23160 - }, - { - "epoch": 0.74144, - "grad_norm": 43.136390686035156, - "learning_rate": 1.3984000000000002e-05, - "loss": 1.668, - "step": 23170 - }, - { - "epoch": 0.74176, - "grad_norm": 43.768821716308594, - "learning_rate": 1.3980444444444445e-05, - "loss": 1.681, - "step": 23180 - }, - { - "epoch": 0.74208, - "grad_norm": 44.26373291015625, - "learning_rate": 1.3976888888888891e-05, - "loss": 1.6726, - "step": 23190 - }, - { - "epoch": 0.7424, - "grad_norm": 44.403743743896484, - "learning_rate": 1.3973333333333334e-05, - "loss": 1.6767, - "step": 23200 - }, - { - "epoch": 0.74272, - "grad_norm": 43.6219482421875, - "learning_rate": 1.396977777777778e-05, - "loss": 1.6611, - "step": 23210 - }, - { - "epoch": 0.74304, - "grad_norm": 43.950042724609375, - "learning_rate": 1.3966222222222223e-05, - "loss": 1.6651, - "step": 23220 - }, - { - "epoch": 0.74336, - "grad_norm": 44.61774444580078, - "learning_rate": 1.396266666666667e-05, - "loss": 1.6349, - "step": 23230 - }, - { - "epoch": 0.74368, - "grad_norm": 43.243309020996094, - "learning_rate": 1.3959111111111112e-05, - "loss": 1.6953, - "step": 23240 - }, - { - "epoch": 0.744, - "grad_norm": 43.35283279418945, - "learning_rate": 1.3955555555555558e-05, - "loss": 1.6393, - "step": 23250 - }, - { - "epoch": 0.74432, - "grad_norm": 43.880435943603516, - "learning_rate": 1.3952000000000001e-05, - "loss": 1.6631, - "step": 23260 - }, - { - "epoch": 0.74464, - "grad_norm": 45.62138366699219, - "learning_rate": 1.3948444444444448e-05, - "loss": 1.6765, - "step": 23270 - }, - { - "epoch": 0.74496, - "grad_norm": 43.951446533203125, - "learning_rate": 1.394488888888889e-05, - "loss": 1.6431, - "step": 23280 - }, - { - "epoch": 0.74528, - "grad_norm": 45.40299606323242, - "learning_rate": 1.3941333333333333e-05, - "loss": 1.6572, - "step": 23290 - }, - { - "epoch": 0.7456, - "grad_norm": 44.030738830566406, - "learning_rate": 1.393777777777778e-05, - "loss": 1.6635, - "step": 23300 - }, - { - "epoch": 0.74592, - "grad_norm": 43.0764274597168, - 
"learning_rate": 1.3934222222222222e-05, - "loss": 1.7001, - "step": 23310 - }, - { - "epoch": 0.74624, - "grad_norm": 44.9112548828125, - "learning_rate": 1.3930666666666669e-05, - "loss": 1.6681, - "step": 23320 - }, - { - "epoch": 0.74656, - "grad_norm": 42.09294509887695, - "learning_rate": 1.3927111111111111e-05, - "loss": 1.6978, - "step": 23330 - }, - { - "epoch": 0.74688, - "grad_norm": 42.35454177856445, - "learning_rate": 1.3923555555555556e-05, - "loss": 1.681, - "step": 23340 - }, - { - "epoch": 0.7472, - "grad_norm": 45.9742317199707, - "learning_rate": 1.392e-05, - "loss": 1.7039, - "step": 23350 - }, - { - "epoch": 0.74752, - "grad_norm": 43.095428466796875, - "learning_rate": 1.3916444444444445e-05, - "loss": 1.6668, - "step": 23360 - }, - { - "epoch": 0.74784, - "grad_norm": 44.52104187011719, - "learning_rate": 1.391288888888889e-05, - "loss": 1.6634, - "step": 23370 - }, - { - "epoch": 0.74816, - "grad_norm": 42.80414962768555, - "learning_rate": 1.3909333333333334e-05, - "loss": 1.6686, - "step": 23380 - }, - { - "epoch": 0.74848, - "grad_norm": 43.77104568481445, - "learning_rate": 1.3905777777777779e-05, - "loss": 1.667, - "step": 23390 - }, - { - "epoch": 0.7488, - "grad_norm": 44.292640686035156, - "learning_rate": 1.3902222222222223e-05, - "loss": 1.6475, - "step": 23400 - }, - { - "epoch": 0.74912, - "grad_norm": 45.744869232177734, - "learning_rate": 1.3898666666666666e-05, - "loss": 1.6634, - "step": 23410 - }, - { - "epoch": 0.74944, - "grad_norm": 45.20486831665039, - "learning_rate": 1.3895111111111112e-05, - "loss": 1.614, - "step": 23420 - }, - { - "epoch": 0.74976, - "grad_norm": 44.7941780090332, - "learning_rate": 1.3891555555555555e-05, - "loss": 1.6562, - "step": 23430 - }, - { - "epoch": 0.75008, - "grad_norm": 45.210330963134766, - "learning_rate": 1.3888000000000002e-05, - "loss": 1.6645, - "step": 23440 - }, - { - "epoch": 0.7504, - "grad_norm": 43.68789291381836, - "learning_rate": 1.3884444444444444e-05, - "loss": 1.6724, - "step": 23450 - }, - { - "epoch": 0.75072, - "grad_norm": 44.261295318603516, - "learning_rate": 1.388088888888889e-05, - "loss": 1.6311, - "step": 23460 - }, - { - "epoch": 0.75104, - "grad_norm": 43.10234832763672, - "learning_rate": 1.3877333333333334e-05, - "loss": 1.664, - "step": 23470 - }, - { - "epoch": 0.75136, - "grad_norm": 44.797393798828125, - "learning_rate": 1.387377777777778e-05, - "loss": 1.6604, - "step": 23480 - }, - { - "epoch": 0.75168, - "grad_norm": 44.64009094238281, - "learning_rate": 1.3870222222222223e-05, - "loss": 1.6917, - "step": 23490 - }, - { - "epoch": 0.752, - "grad_norm": 44.40959167480469, - "learning_rate": 1.3866666666666669e-05, - "loss": 1.6451, - "step": 23500 - }, - { - "epoch": 0.75232, - "grad_norm": 45.48454284667969, - "learning_rate": 1.3863111111111112e-05, - "loss": 1.6607, - "step": 23510 - }, - { - "epoch": 0.75264, - "grad_norm": 42.83772277832031, - "learning_rate": 1.3859555555555558e-05, - "loss": 1.6673, - "step": 23520 - }, - { - "epoch": 0.75296, - "grad_norm": 43.72276306152344, - "learning_rate": 1.3856e-05, - "loss": 1.6506, - "step": 23530 - }, - { - "epoch": 0.75328, - "grad_norm": 45.616329193115234, - "learning_rate": 1.3852444444444445e-05, - "loss": 1.6674, - "step": 23540 - }, - { - "epoch": 0.7536, - "grad_norm": 42.29286193847656, - "learning_rate": 1.384888888888889e-05, - "loss": 1.6449, - "step": 23550 - }, - { - "epoch": 0.75392, - "grad_norm": 43.88602066040039, - "learning_rate": 1.3845333333333334e-05, - "loss": 1.6775, - "step": 23560 - }, - { - 
"epoch": 0.75424, - "grad_norm": 41.9892578125, - "learning_rate": 1.3841777777777779e-05, - "loss": 1.6498, - "step": 23570 - }, - { - "epoch": 0.75456, - "grad_norm": 44.01221466064453, - "learning_rate": 1.3838222222222224e-05, - "loss": 1.6794, - "step": 23580 - }, - { - "epoch": 0.75488, - "grad_norm": 47.409400939941406, - "learning_rate": 1.3834666666666668e-05, - "loss": 1.692, - "step": 23590 - }, - { - "epoch": 0.7552, - "grad_norm": 47.6396484375, - "learning_rate": 1.3831111111111113e-05, - "loss": 1.652, - "step": 23600 - }, - { - "epoch": 0.75552, - "grad_norm": 44.32497024536133, - "learning_rate": 1.3827555555555556e-05, - "loss": 1.6507, - "step": 23610 - }, - { - "epoch": 0.75584, - "grad_norm": 43.5431022644043, - "learning_rate": 1.3824000000000002e-05, - "loss": 1.6976, - "step": 23620 - }, - { - "epoch": 0.75616, - "grad_norm": 42.28642272949219, - "learning_rate": 1.3820444444444445e-05, - "loss": 1.6915, - "step": 23630 - }, - { - "epoch": 0.75648, - "grad_norm": 42.09159469604492, - "learning_rate": 1.3816888888888891e-05, - "loss": 1.6478, - "step": 23640 - }, - { - "epoch": 0.7568, - "grad_norm": 44.362796783447266, - "learning_rate": 1.3813333333333334e-05, - "loss": 1.6668, - "step": 23650 - }, - { - "epoch": 0.75712, - "grad_norm": 43.37620544433594, - "learning_rate": 1.380977777777778e-05, - "loss": 1.6616, - "step": 23660 - }, - { - "epoch": 0.75744, - "grad_norm": 42.7562141418457, - "learning_rate": 1.3806222222222223e-05, - "loss": 1.6511, - "step": 23670 - }, - { - "epoch": 0.75776, - "grad_norm": 43.32087707519531, - "learning_rate": 1.3802666666666669e-05, - "loss": 1.6735, - "step": 23680 - }, - { - "epoch": 0.75808, - "grad_norm": 43.88164520263672, - "learning_rate": 1.3799111111111112e-05, - "loss": 1.6521, - "step": 23690 - }, - { - "epoch": 0.7584, - "grad_norm": 44.2587776184082, - "learning_rate": 1.3795555555555558e-05, - "loss": 1.6539, - "step": 23700 - }, - { - "epoch": 0.75872, - "grad_norm": 43.35541915893555, - "learning_rate": 1.3792000000000001e-05, - "loss": 1.6679, - "step": 23710 - }, - { - "epoch": 0.75904, - "grad_norm": 44.739402770996094, - "learning_rate": 1.3788444444444447e-05, - "loss": 1.6677, - "step": 23720 - }, - { - "epoch": 0.75936, - "grad_norm": 42.22761154174805, - "learning_rate": 1.378488888888889e-05, - "loss": 1.6598, - "step": 23730 - }, - { - "epoch": 0.75968, - "grad_norm": 45.30773162841797, - "learning_rate": 1.3781333333333335e-05, - "loss": 1.6386, - "step": 23740 - }, - { - "epoch": 0.76, - "grad_norm": 46.231231689453125, - "learning_rate": 1.377777777777778e-05, - "loss": 1.6652, - "step": 23750 - }, - { - "epoch": 0.76032, - "grad_norm": 43.435951232910156, - "learning_rate": 1.3774222222222224e-05, - "loss": 1.6611, - "step": 23760 - }, - { - "epoch": 0.76064, - "grad_norm": 45.195953369140625, - "learning_rate": 1.3770666666666668e-05, - "loss": 1.6879, - "step": 23770 - }, - { - "epoch": 0.76096, - "grad_norm": 44.517269134521484, - "learning_rate": 1.3767111111111113e-05, - "loss": 1.6266, - "step": 23780 - }, - { - "epoch": 0.76128, - "grad_norm": 47.46632766723633, - "learning_rate": 1.3763555555555556e-05, - "loss": 1.693, - "step": 23790 - }, - { - "epoch": 0.7616, - "grad_norm": 44.885990142822266, - "learning_rate": 1.376e-05, - "loss": 1.651, - "step": 23800 - }, - { - "epoch": 0.76192, - "grad_norm": 45.48068618774414, - "learning_rate": 1.3756444444444445e-05, - "loss": 1.6616, - "step": 23810 - }, - { - "epoch": 0.76224, - "grad_norm": 44.51014709472656, - "learning_rate": 
1.375288888888889e-05, - "loss": 1.7018, - "step": 23820 - }, - { - "epoch": 0.76256, - "grad_norm": 43.273250579833984, - "learning_rate": 1.3749333333333334e-05, - "loss": 1.658, - "step": 23830 - }, - { - "epoch": 0.76288, - "grad_norm": 44.35025405883789, - "learning_rate": 1.3745777777777779e-05, - "loss": 1.6565, - "step": 23840 - }, - { - "epoch": 0.7632, - "grad_norm": 46.16026306152344, - "learning_rate": 1.3742222222222223e-05, - "loss": 1.6601, - "step": 23850 - }, - { - "epoch": 0.76352, - "grad_norm": 45.772403717041016, - "learning_rate": 1.3738666666666666e-05, - "loss": 1.6429, - "step": 23860 - }, - { - "epoch": 0.76384, - "grad_norm": 43.03438949584961, - "learning_rate": 1.3735111111111112e-05, - "loss": 1.6594, - "step": 23870 - }, - { - "epoch": 0.76416, - "grad_norm": 45.24376678466797, - "learning_rate": 1.3731555555555555e-05, - "loss": 1.6646, - "step": 23880 - }, - { - "epoch": 0.76448, - "grad_norm": 43.241172790527344, - "learning_rate": 1.3728000000000001e-05, - "loss": 1.652, - "step": 23890 - }, - { - "epoch": 0.7648, - "grad_norm": 43.52678298950195, - "learning_rate": 1.3724444444444444e-05, - "loss": 1.6588, - "step": 23900 - }, - { - "epoch": 0.76512, - "grad_norm": 44.89611053466797, - "learning_rate": 1.372088888888889e-05, - "loss": 1.6523, - "step": 23910 - }, - { - "epoch": 0.76544, - "grad_norm": 43.12721252441406, - "learning_rate": 1.3717333333333333e-05, - "loss": 1.6595, - "step": 23920 - }, - { - "epoch": 0.76576, - "grad_norm": 47.17388153076172, - "learning_rate": 1.371377777777778e-05, - "loss": 1.6648, - "step": 23930 - }, - { - "epoch": 0.76608, - "grad_norm": 43.1567268371582, - "learning_rate": 1.3710222222222222e-05, - "loss": 1.6707, - "step": 23940 - }, - { - "epoch": 0.7664, - "grad_norm": 44.261985778808594, - "learning_rate": 1.3706666666666669e-05, - "loss": 1.6733, - "step": 23950 - }, - { - "epoch": 0.76672, - "grad_norm": 46.473670959472656, - "learning_rate": 1.3703111111111112e-05, - "loss": 1.6944, - "step": 23960 - }, - { - "epoch": 0.76704, - "grad_norm": 44.448974609375, - "learning_rate": 1.3699555555555558e-05, - "loss": 1.6654, - "step": 23970 - }, - { - "epoch": 0.76736, - "grad_norm": 42.94993209838867, - "learning_rate": 1.3696e-05, - "loss": 1.6726, - "step": 23980 - }, - { - "epoch": 0.76768, - "grad_norm": 43.04637908935547, - "learning_rate": 1.3692444444444445e-05, - "loss": 1.6726, - "step": 23990 - }, - { - "epoch": 0.768, - "grad_norm": 45.355377197265625, - "learning_rate": 1.368888888888889e-05, - "loss": 1.6507, - "step": 24000 - }, - { - "epoch": 0.76832, - "grad_norm": 44.12913131713867, - "learning_rate": 1.3685333333333334e-05, - "loss": 1.6675, - "step": 24010 - }, - { - "epoch": 0.76864, - "grad_norm": 44.19668197631836, - "learning_rate": 1.3681777777777779e-05, - "loss": 1.6521, - "step": 24020 - }, - { - "epoch": 0.76896, - "grad_norm": 43.70733642578125, - "learning_rate": 1.3678222222222223e-05, - "loss": 1.6814, - "step": 24030 - }, - { - "epoch": 0.76928, - "grad_norm": 45.314842224121094, - "learning_rate": 1.3674666666666668e-05, - "loss": 1.6672, - "step": 24040 - }, - { - "epoch": 0.7696, - "grad_norm": 43.1247444152832, - "learning_rate": 1.3671111111111113e-05, - "loss": 1.6249, - "step": 24050 - }, - { - "epoch": 0.76992, - "grad_norm": 42.034088134765625, - "learning_rate": 1.3667555555555555e-05, - "loss": 1.618, - "step": 24060 - }, - { - "epoch": 0.77024, - "grad_norm": 42.96197509765625, - "learning_rate": 1.3664000000000002e-05, - "loss": 1.6626, - "step": 24070 - }, - { - 
"epoch": 0.77056, - "grad_norm": 44.40598678588867, - "learning_rate": 1.3660444444444445e-05, - "loss": 1.6692, - "step": 24080 - }, - { - "epoch": 0.77088, - "grad_norm": 44.56721878051758, - "learning_rate": 1.365688888888889e-05, - "loss": 1.6664, - "step": 24090 - }, - { - "epoch": 0.7712, - "grad_norm": 43.622642517089844, - "learning_rate": 1.3653333333333334e-05, - "loss": 1.6357, - "step": 24100 - }, - { - "epoch": 0.77152, - "grad_norm": 44.824275970458984, - "learning_rate": 1.364977777777778e-05, - "loss": 1.6538, - "step": 24110 - }, - { - "epoch": 0.77184, - "grad_norm": 46.225868225097656, - "learning_rate": 1.3646222222222223e-05, - "loss": 1.6541, - "step": 24120 - }, - { - "epoch": 0.77216, - "grad_norm": 44.35313034057617, - "learning_rate": 1.3642666666666669e-05, - "loss": 1.6592, - "step": 24130 - }, - { - "epoch": 0.77248, - "grad_norm": 43.661659240722656, - "learning_rate": 1.3639111111111112e-05, - "loss": 1.6533, - "step": 24140 - }, - { - "epoch": 0.7728, - "grad_norm": 42.956539154052734, - "learning_rate": 1.3635555555555558e-05, - "loss": 1.6678, - "step": 24150 - }, - { - "epoch": 0.77312, - "grad_norm": 42.41958236694336, - "learning_rate": 1.3632000000000001e-05, - "loss": 1.6558, - "step": 24160 - }, - { - "epoch": 0.77344, - "grad_norm": 44.25238037109375, - "learning_rate": 1.3628444444444447e-05, - "loss": 1.6258, - "step": 24170 - }, - { - "epoch": 0.77376, - "grad_norm": 44.08992004394531, - "learning_rate": 1.362488888888889e-05, - "loss": 1.669, - "step": 24180 - }, - { - "epoch": 0.77408, - "grad_norm": 44.67106246948242, - "learning_rate": 1.3621333333333335e-05, - "loss": 1.6765, - "step": 24190 - }, - { - "epoch": 0.7744, - "grad_norm": 43.50186538696289, - "learning_rate": 1.361777777777778e-05, - "loss": 1.6386, - "step": 24200 - }, - { - "epoch": 0.77472, - "grad_norm": 45.129730224609375, - "learning_rate": 1.3614222222222224e-05, - "loss": 1.6417, - "step": 24210 - }, - { - "epoch": 0.77504, - "grad_norm": 43.64735794067383, - "learning_rate": 1.3610666666666668e-05, - "loss": 1.6336, - "step": 24220 - }, - { - "epoch": 0.77536, - "grad_norm": 45.431678771972656, - "learning_rate": 1.3607111111111113e-05, - "loss": 1.6866, - "step": 24230 - }, - { - "epoch": 0.77568, - "grad_norm": 45.0831298828125, - "learning_rate": 1.3603555555555556e-05, - "loss": 1.6499, - "step": 24240 - }, - { - "epoch": 0.776, - "grad_norm": 43.368614196777344, - "learning_rate": 1.3600000000000002e-05, - "loss": 1.6655, - "step": 24250 - }, - { - "epoch": 0.77632, - "grad_norm": 42.33442687988281, - "learning_rate": 1.3596444444444445e-05, - "loss": 1.6556, - "step": 24260 - }, - { - "epoch": 0.77664, - "grad_norm": 45.05806350708008, - "learning_rate": 1.3592888888888891e-05, - "loss": 1.6383, - "step": 24270 - }, - { - "epoch": 0.77696, - "grad_norm": 43.16596603393555, - "learning_rate": 1.3589333333333334e-05, - "loss": 1.663, - "step": 24280 - }, - { - "epoch": 0.77728, - "grad_norm": 46.41241455078125, - "learning_rate": 1.358577777777778e-05, - "loss": 1.6907, - "step": 24290 - }, - { - "epoch": 0.7776, - "grad_norm": 46.05997848510742, - "learning_rate": 1.3582222222222223e-05, - "loss": 1.6556, - "step": 24300 - }, - { - "epoch": 0.77792, - "grad_norm": 44.43130111694336, - "learning_rate": 1.357866666666667e-05, - "loss": 1.6517, - "step": 24310 - }, - { - "epoch": 0.77824, - "grad_norm": 43.29718780517578, - "learning_rate": 1.3575111111111112e-05, - "loss": 1.651, - "step": 24320 - }, - { - "epoch": 0.77856, - "grad_norm": 45.74983596801758, - 
"learning_rate": 1.3571555555555555e-05, - "loss": 1.6475, - "step": 24330 - }, - { - "epoch": 0.77888, - "grad_norm": 43.65877914428711, - "learning_rate": 1.3568000000000001e-05, - "loss": 1.6843, - "step": 24340 - }, - { - "epoch": 0.7792, - "grad_norm": 47.665771484375, - "learning_rate": 1.3564444444444444e-05, - "loss": 1.6628, - "step": 24350 - }, - { - "epoch": 0.77952, - "grad_norm": 43.86433029174805, - "learning_rate": 1.356088888888889e-05, - "loss": 1.6532, - "step": 24360 - }, - { - "epoch": 0.77984, - "grad_norm": 43.74129867553711, - "learning_rate": 1.3557333333333333e-05, - "loss": 1.6488, - "step": 24370 - }, - { - "epoch": 0.78016, - "grad_norm": 43.49934387207031, - "learning_rate": 1.355377777777778e-05, - "loss": 1.6495, - "step": 24380 - }, - { - "epoch": 0.78048, - "grad_norm": 44.210689544677734, - "learning_rate": 1.3550222222222222e-05, - "loss": 1.6597, - "step": 24390 - }, - { - "epoch": 0.7808, - "grad_norm": 42.381004333496094, - "learning_rate": 1.3546666666666669e-05, - "loss": 1.6672, - "step": 24400 - }, - { - "epoch": 0.78112, - "grad_norm": 44.646244049072266, - "learning_rate": 1.3543111111111111e-05, - "loss": 1.6598, - "step": 24410 - }, - { - "epoch": 0.78144, - "grad_norm": 43.49140167236328, - "learning_rate": 1.3539555555555558e-05, - "loss": 1.6601, - "step": 24420 - }, - { - "epoch": 0.78176, - "grad_norm": 43.86427688598633, - "learning_rate": 1.3536e-05, - "loss": 1.625, - "step": 24430 - }, - { - "epoch": 0.78208, - "grad_norm": 44.45777893066406, - "learning_rate": 1.3532444444444445e-05, - "loss": 1.6933, - "step": 24440 - }, - { - "epoch": 0.7824, - "grad_norm": 45.80703353881836, - "learning_rate": 1.352888888888889e-05, - "loss": 1.6606, - "step": 24450 - }, - { - "epoch": 0.78272, - "grad_norm": 43.04664993286133, - "learning_rate": 1.3525333333333334e-05, - "loss": 1.6555, - "step": 24460 - }, - { - "epoch": 0.78304, - "grad_norm": 44.184017181396484, - "learning_rate": 1.3521777777777779e-05, - "loss": 1.6533, - "step": 24470 - }, - { - "epoch": 0.78336, - "grad_norm": 45.938289642333984, - "learning_rate": 1.3518222222222223e-05, - "loss": 1.6599, - "step": 24480 - }, - { - "epoch": 0.78368, - "grad_norm": 43.94887924194336, - "learning_rate": 1.3514666666666668e-05, - "loss": 1.6648, - "step": 24490 - }, - { - "epoch": 0.784, - "grad_norm": 43.67192459106445, - "learning_rate": 1.3511111111111112e-05, - "loss": 1.6821, - "step": 24500 - }, - { - "epoch": 0.78432, - "grad_norm": 44.66633987426758, - "learning_rate": 1.3507555555555555e-05, - "loss": 1.6611, - "step": 24510 - }, - { - "epoch": 0.78464, - "grad_norm": 43.301578521728516, - "learning_rate": 1.3504000000000001e-05, - "loss": 1.6827, - "step": 24520 - }, - { - "epoch": 0.78496, - "grad_norm": 44.07039260864258, - "learning_rate": 1.3500444444444444e-05, - "loss": 1.6637, - "step": 24530 - }, - { - "epoch": 0.78528, - "grad_norm": 43.60643768310547, - "learning_rate": 1.349688888888889e-05, - "loss": 1.6485, - "step": 24540 - }, - { - "epoch": 0.7856, - "grad_norm": 42.864803314208984, - "learning_rate": 1.3493333333333333e-05, - "loss": 1.6656, - "step": 24550 - }, - { - "epoch": 0.78592, - "grad_norm": 44.05122375488281, - "learning_rate": 1.348977777777778e-05, - "loss": 1.6528, - "step": 24560 - }, - { - "epoch": 0.78624, - "grad_norm": 44.83565139770508, - "learning_rate": 1.3486222222222223e-05, - "loss": 1.6626, - "step": 24570 - }, - { - "epoch": 0.78656, - "grad_norm": 44.86031723022461, - "learning_rate": 1.3482666666666669e-05, - "loss": 1.6658, - "step": 
24580 - }, - { - "epoch": 0.78688, - "grad_norm": 43.807945251464844, - "learning_rate": 1.3479111111111112e-05, - "loss": 1.6325, - "step": 24590 - }, - { - "epoch": 0.7872, - "grad_norm": 43.93977355957031, - "learning_rate": 1.3475555555555558e-05, - "loss": 1.6677, - "step": 24600 - }, - { - "epoch": 0.78752, - "grad_norm": 43.10548400878906, - "learning_rate": 1.3472e-05, - "loss": 1.6611, - "step": 24610 - }, - { - "epoch": 0.78784, - "grad_norm": 42.78868865966797, - "learning_rate": 1.3468444444444447e-05, - "loss": 1.6303, - "step": 24620 - }, - { - "epoch": 0.78816, - "grad_norm": 46.41221618652344, - "learning_rate": 1.346488888888889e-05, - "loss": 1.643, - "step": 24630 - }, - { - "epoch": 0.78848, - "grad_norm": 43.43020248413086, - "learning_rate": 1.3461333333333334e-05, - "loss": 1.6406, - "step": 24640 - }, - { - "epoch": 0.7888, - "grad_norm": 45.2264404296875, - "learning_rate": 1.3457777777777779e-05, - "loss": 1.6771, - "step": 24650 - }, - { - "epoch": 0.78912, - "grad_norm": 41.3878288269043, - "learning_rate": 1.3454222222222224e-05, - "loss": 1.6309, - "step": 24660 - }, - { - "epoch": 0.78944, - "grad_norm": 44.629066467285156, - "learning_rate": 1.3450666666666668e-05, - "loss": 1.6698, - "step": 24670 - }, - { - "epoch": 0.78976, - "grad_norm": 43.920928955078125, - "learning_rate": 1.3447111111111113e-05, - "loss": 1.6432, - "step": 24680 - }, - { - "epoch": 0.79008, - "grad_norm": 44.86647033691406, - "learning_rate": 1.3443555555555556e-05, - "loss": 1.6829, - "step": 24690 - }, - { - "epoch": 0.7904, - "grad_norm": 44.51643371582031, - "learning_rate": 1.3440000000000002e-05, - "loss": 1.6599, - "step": 24700 - }, - { - "epoch": 0.79072, - "grad_norm": 42.92637252807617, - "learning_rate": 1.3436444444444445e-05, - "loss": 1.6457, - "step": 24710 - }, - { - "epoch": 0.79104, - "grad_norm": 44.864200592041016, - "learning_rate": 1.3432888888888891e-05, - "loss": 1.6647, - "step": 24720 - }, - { - "epoch": 0.79136, - "grad_norm": 43.50864028930664, - "learning_rate": 1.3429333333333334e-05, - "loss": 1.6836, - "step": 24730 - }, - { - "epoch": 0.79168, - "grad_norm": 44.62223434448242, - "learning_rate": 1.342577777777778e-05, - "loss": 1.6503, - "step": 24740 - }, - { - "epoch": 0.792, - "grad_norm": 44.60259246826172, - "learning_rate": 1.3422222222222223e-05, - "loss": 1.6764, - "step": 24750 - }, - { - "epoch": 0.79232, - "grad_norm": 43.178924560546875, - "learning_rate": 1.3418666666666669e-05, - "loss": 1.6601, - "step": 24760 - }, - { - "epoch": 0.79264, - "grad_norm": 44.4167594909668, - "learning_rate": 1.3415111111111112e-05, - "loss": 1.654, - "step": 24770 - }, - { - "epoch": 0.79296, - "grad_norm": 45.009437561035156, - "learning_rate": 1.3411555555555558e-05, - "loss": 1.6622, - "step": 24780 - }, - { - "epoch": 0.79328, - "grad_norm": 44.56413269042969, - "learning_rate": 1.3408000000000001e-05, - "loss": 1.6705, - "step": 24790 - }, - { - "epoch": 0.7936, - "grad_norm": 42.48627853393555, - "learning_rate": 1.3404444444444447e-05, - "loss": 1.6634, - "step": 24800 - }, - { - "epoch": 0.79392, - "grad_norm": 44.3420524597168, - "learning_rate": 1.340088888888889e-05, - "loss": 1.6268, - "step": 24810 - }, - { - "epoch": 0.79424, - "grad_norm": 42.205543518066406, - "learning_rate": 1.3397333333333335e-05, - "loss": 1.6493, - "step": 24820 - }, - { - "epoch": 0.79456, - "grad_norm": 44.827919006347656, - "learning_rate": 1.339377777777778e-05, - "loss": 1.637, - "step": 24830 - }, - { - "epoch": 0.79488, - "grad_norm": 43.14591979980469, - 
"learning_rate": 1.3390222222222222e-05, - "loss": 1.6362, - "step": 24840 - }, - { - "epoch": 0.7952, - "grad_norm": 43.567264556884766, - "learning_rate": 1.3386666666666668e-05, - "loss": 1.6551, - "step": 24850 - }, - { - "epoch": 0.79552, - "grad_norm": 44.323707580566406, - "learning_rate": 1.3383111111111111e-05, - "loss": 1.6591, - "step": 24860 - }, - { - "epoch": 0.79584, - "grad_norm": 41.868961334228516, - "learning_rate": 1.3379555555555557e-05, - "loss": 1.6621, - "step": 24870 - }, - { - "epoch": 0.79616, - "grad_norm": 43.79658126831055, - "learning_rate": 1.3376e-05, - "loss": 1.6515, - "step": 24880 - }, - { - "epoch": 0.79648, - "grad_norm": 48.700313568115234, - "learning_rate": 1.3372444444444445e-05, - "loss": 1.6455, - "step": 24890 - }, - { - "epoch": 0.7968, - "grad_norm": 42.93735122680664, - "learning_rate": 1.336888888888889e-05, - "loss": 1.6396, - "step": 24900 - }, - { - "epoch": 0.79712, - "grad_norm": 46.09784698486328, - "learning_rate": 1.3365333333333334e-05, - "loss": 1.6419, - "step": 24910 - }, - { - "epoch": 0.79744, - "grad_norm": 42.79138946533203, - "learning_rate": 1.3361777777777779e-05, - "loss": 1.6685, - "step": 24920 - }, - { - "epoch": 0.79776, - "grad_norm": 46.662940979003906, - "learning_rate": 1.3358222222222223e-05, - "loss": 1.6568, - "step": 24930 - }, - { - "epoch": 0.79808, - "grad_norm": 46.082244873046875, - "learning_rate": 1.3354666666666668e-05, - "loss": 1.6478, - "step": 24940 - }, - { - "epoch": 0.7984, - "grad_norm": 45.16201400756836, - "learning_rate": 1.3351111111111112e-05, - "loss": 1.6585, - "step": 24950 - }, - { - "epoch": 0.79872, - "grad_norm": 44.113380432128906, - "learning_rate": 1.3347555555555555e-05, - "loss": 1.6702, - "step": 24960 - }, - { - "epoch": 0.79904, - "grad_norm": 44.27860641479492, - "learning_rate": 1.3344000000000001e-05, - "loss": 1.642, - "step": 24970 - }, - { - "epoch": 0.79936, - "grad_norm": 45.675655364990234, - "learning_rate": 1.3340444444444444e-05, - "loss": 1.6243, - "step": 24980 - }, - { - "epoch": 0.79968, - "grad_norm": 45.01850891113281, - "learning_rate": 1.333688888888889e-05, - "loss": 1.6773, - "step": 24990 - }, - { - "epoch": 0.8, - "grad_norm": 44.070579528808594, - "learning_rate": 1.3333333333333333e-05, - "loss": 1.6678, - "step": 25000 - }, - { - "epoch": 0.80032, - "grad_norm": 44.78519058227539, - "learning_rate": 1.332977777777778e-05, - "loss": 1.6671, - "step": 25010 - }, - { - "epoch": 0.80064, - "grad_norm": 45.15095138549805, - "learning_rate": 1.3326222222222222e-05, - "loss": 1.6402, - "step": 25020 - }, - { - "epoch": 0.80096, - "grad_norm": 43.276493072509766, - "learning_rate": 1.3322666666666669e-05, - "loss": 1.6672, - "step": 25030 - }, - { - "epoch": 0.80128, - "grad_norm": 44.25715255737305, - "learning_rate": 1.3319111111111112e-05, - "loss": 1.6302, - "step": 25040 - }, - { - "epoch": 0.8016, - "grad_norm": 43.42182540893555, - "learning_rate": 1.3315555555555558e-05, - "loss": 1.6653, - "step": 25050 - }, - { - "epoch": 0.80192, - "grad_norm": 46.8543586730957, - "learning_rate": 1.3312e-05, - "loss": 1.687, - "step": 25060 - }, - { - "epoch": 0.80224, - "grad_norm": 44.426082611083984, - "learning_rate": 1.3308444444444447e-05, - "loss": 1.6328, - "step": 25070 - }, - { - "epoch": 0.80256, - "grad_norm": 46.286983489990234, - "learning_rate": 1.330488888888889e-05, - "loss": 1.6477, - "step": 25080 - }, - { - "epoch": 0.80288, - "grad_norm": 44.83964920043945, - "learning_rate": 1.3301333333333334e-05, - "loss": 1.6905, - "step": 25090 - }, - 
{ - "epoch": 0.8032, - "grad_norm": 44.6434326171875, - "learning_rate": 1.3297777777777779e-05, - "loss": 1.6449, - "step": 25100 - }, - { - "epoch": 0.80352, - "grad_norm": 44.10059356689453, - "learning_rate": 1.3294222222222223e-05, - "loss": 1.6668, - "step": 25110 - }, - { - "epoch": 0.80384, - "grad_norm": 43.977718353271484, - "learning_rate": 1.3290666666666668e-05, - "loss": 1.6696, - "step": 25120 - }, - { - "epoch": 0.80416, - "grad_norm": 43.653648376464844, - "learning_rate": 1.3287111111111112e-05, - "loss": 1.6548, - "step": 25130 - }, - { - "epoch": 0.80448, - "grad_norm": 42.916500091552734, - "learning_rate": 1.3283555555555557e-05, - "loss": 1.6852, - "step": 25140 - }, - { - "epoch": 0.8048, - "grad_norm": 44.64195251464844, - "learning_rate": 1.3280000000000002e-05, - "loss": 1.6425, - "step": 25150 - }, - { - "epoch": 0.80512, - "grad_norm": 44.12642288208008, - "learning_rate": 1.3276444444444444e-05, - "loss": 1.6456, - "step": 25160 - }, - { - "epoch": 0.80544, - "grad_norm": 44.04334259033203, - "learning_rate": 1.327288888888889e-05, - "loss": 1.6704, - "step": 25170 - }, - { - "epoch": 0.80576, - "grad_norm": 43.638301849365234, - "learning_rate": 1.3269333333333334e-05, - "loss": 1.6545, - "step": 25180 - }, - { - "epoch": 0.80608, - "grad_norm": 44.577510833740234, - "learning_rate": 1.326577777777778e-05, - "loss": 1.6479, - "step": 25190 - }, - { - "epoch": 0.8064, - "grad_norm": 43.7669563293457, - "learning_rate": 1.3262222222222223e-05, - "loss": 1.6511, - "step": 25200 - }, - { - "epoch": 0.80672, - "grad_norm": 44.01200485229492, - "learning_rate": 1.3258666666666669e-05, - "loss": 1.6738, - "step": 25210 - }, - { - "epoch": 0.80704, - "grad_norm": 44.7772216796875, - "learning_rate": 1.3255111111111112e-05, - "loss": 1.6402, - "step": 25220 - }, - { - "epoch": 0.80736, - "grad_norm": 43.836692810058594, - "learning_rate": 1.3251555555555558e-05, - "loss": 1.6447, - "step": 25230 - }, - { - "epoch": 0.80768, - "grad_norm": 43.029361724853516, - "learning_rate": 1.3248000000000001e-05, - "loss": 1.6474, - "step": 25240 - }, - { - "epoch": 0.808, - "grad_norm": 44.712398529052734, - "learning_rate": 1.3244444444444447e-05, - "loss": 1.6796, - "step": 25250 - }, - { - "epoch": 0.80832, - "grad_norm": 44.133628845214844, - "learning_rate": 1.324088888888889e-05, - "loss": 1.6523, - "step": 25260 - }, - { - "epoch": 0.80864, - "grad_norm": 43.37736129760742, - "learning_rate": 1.3237333333333335e-05, - "loss": 1.659, - "step": 25270 - }, - { - "epoch": 0.80896, - "grad_norm": 45.66469955444336, - "learning_rate": 1.3233777777777779e-05, - "loss": 1.6422, - "step": 25280 - }, - { - "epoch": 0.80928, - "grad_norm": 44.68482208251953, - "learning_rate": 1.3230222222222224e-05, - "loss": 1.6417, - "step": 25290 - }, - { - "epoch": 0.8096, - "grad_norm": 45.07290267944336, - "learning_rate": 1.3226666666666668e-05, - "loss": 1.6818, - "step": 25300 - }, - { - "epoch": 0.80992, - "grad_norm": 44.1818962097168, - "learning_rate": 1.3223111111111113e-05, - "loss": 1.6762, - "step": 25310 - }, - { - "epoch": 0.81024, - "grad_norm": 44.05356216430664, - "learning_rate": 1.3219555555555557e-05, - "loss": 1.6383, - "step": 25320 - }, - { - "epoch": 0.81056, - "grad_norm": 43.593894958496094, - "learning_rate": 1.3216000000000002e-05, - "loss": 1.6475, - "step": 25330 - }, - { - "epoch": 0.81088, - "grad_norm": 44.107872009277344, - "learning_rate": 1.3212444444444445e-05, - "loss": 1.6635, - "step": 25340 - }, - { - "epoch": 0.8112, - "grad_norm": 45.551185607910156, - 
"learning_rate": 1.320888888888889e-05, - "loss": 1.66, - "step": 25350 - }, - { - "epoch": 0.81152, - "grad_norm": 44.834293365478516, - "learning_rate": 1.3205333333333334e-05, - "loss": 1.6653, - "step": 25360 - }, - { - "epoch": 0.81184, - "grad_norm": 44.376991271972656, - "learning_rate": 1.3201777777777778e-05, - "loss": 1.6607, - "step": 25370 - }, - { - "epoch": 0.81216, - "grad_norm": 46.16840744018555, - "learning_rate": 1.3198222222222223e-05, - "loss": 1.6611, - "step": 25380 - }, - { - "epoch": 0.81248, - "grad_norm": 43.237728118896484, - "learning_rate": 1.3194666666666668e-05, - "loss": 1.6563, - "step": 25390 - }, - { - "epoch": 0.8128, - "grad_norm": 46.14860153198242, - "learning_rate": 1.3191111111111112e-05, - "loss": 1.6526, - "step": 25400 - }, - { - "epoch": 0.81312, - "grad_norm": 43.493927001953125, - "learning_rate": 1.3187555555555555e-05, - "loss": 1.6353, - "step": 25410 - }, - { - "epoch": 0.81344, - "grad_norm": 43.593013763427734, - "learning_rate": 1.3184000000000001e-05, - "loss": 1.6373, - "step": 25420 - }, - { - "epoch": 0.81376, - "grad_norm": 44.05482864379883, - "learning_rate": 1.3180444444444444e-05, - "loss": 1.6472, - "step": 25430 - }, - { - "epoch": 0.81408, - "grad_norm": 45.95872116088867, - "learning_rate": 1.317688888888889e-05, - "loss": 1.6422, - "step": 25440 - }, - { - "epoch": 0.8144, - "grad_norm": 44.911746978759766, - "learning_rate": 1.3173333333333333e-05, - "loss": 1.635, - "step": 25450 - }, - { - "epoch": 0.81472, - "grad_norm": 44.14010238647461, - "learning_rate": 1.316977777777778e-05, - "loss": 1.6578, - "step": 25460 - }, - { - "epoch": 0.81504, - "grad_norm": 41.602718353271484, - "learning_rate": 1.3166222222222222e-05, - "loss": 1.6693, - "step": 25470 - }, - { - "epoch": 0.81536, - "grad_norm": 45.07976150512695, - "learning_rate": 1.3162666666666668e-05, - "loss": 1.6623, - "step": 25480 - }, - { - "epoch": 0.81568, - "grad_norm": 43.92379379272461, - "learning_rate": 1.3159111111111111e-05, - "loss": 1.6421, - "step": 25490 - }, - { - "epoch": 0.816, - "grad_norm": 44.13690948486328, - "learning_rate": 1.3155555555555558e-05, - "loss": 1.6493, - "step": 25500 - }, - { - "epoch": 0.81632, - "grad_norm": 47.86391067504883, - "learning_rate": 1.3152e-05, - "loss": 1.6477, - "step": 25510 - }, - { - "epoch": 0.81664, - "grad_norm": 43.6804084777832, - "learning_rate": 1.3148444444444447e-05, - "loss": 1.6467, - "step": 25520 - }, - { - "epoch": 0.81696, - "grad_norm": 46.26144790649414, - "learning_rate": 1.314488888888889e-05, - "loss": 1.6511, - "step": 25530 - }, - { - "epoch": 0.81728, - "grad_norm": 42.43048095703125, - "learning_rate": 1.3141333333333334e-05, - "loss": 1.6476, - "step": 25540 - }, - { - "epoch": 0.8176, - "grad_norm": 45.539634704589844, - "learning_rate": 1.3137777777777779e-05, - "loss": 1.6565, - "step": 25550 - }, - { - "epoch": 0.81792, - "grad_norm": 46.257110595703125, - "learning_rate": 1.3134222222222223e-05, - "loss": 1.6453, - "step": 25560 - }, - { - "epoch": 0.81824, - "grad_norm": 45.317142486572266, - "learning_rate": 1.3130666666666668e-05, - "loss": 1.6515, - "step": 25570 - }, - { - "epoch": 0.81856, - "grad_norm": 44.00784683227539, - "learning_rate": 1.3127111111111112e-05, - "loss": 1.6858, - "step": 25580 - }, - { - "epoch": 0.81888, - "grad_norm": 45.211429595947266, - "learning_rate": 1.3123555555555557e-05, - "loss": 1.6481, - "step": 25590 - }, - { - "epoch": 0.8192, - "grad_norm": 44.39881896972656, - "learning_rate": 1.3120000000000001e-05, - "loss": 1.6445, - "step": 
25600 - }, - { - "epoch": 0.81952, - "grad_norm": 43.63643264770508, - "learning_rate": 1.3116444444444444e-05, - "loss": 1.6423, - "step": 25610 - }, - { - "epoch": 0.81984, - "grad_norm": 42.108158111572266, - "learning_rate": 1.311288888888889e-05, - "loss": 1.6704, - "step": 25620 - }, - { - "epoch": 0.82016, - "grad_norm": 43.87276840209961, - "learning_rate": 1.3109333333333333e-05, - "loss": 1.6401, - "step": 25630 - }, - { - "epoch": 0.82048, - "grad_norm": 43.294090270996094, - "learning_rate": 1.310577777777778e-05, - "loss": 1.688, - "step": 25640 - }, - { - "epoch": 0.8208, - "grad_norm": 42.391029357910156, - "learning_rate": 1.3102222222222223e-05, - "loss": 1.6719, - "step": 25650 - }, - { - "epoch": 0.82112, - "grad_norm": 44.66448211669922, - "learning_rate": 1.3098666666666669e-05, - "loss": 1.6376, - "step": 25660 - }, - { - "epoch": 0.82144, - "grad_norm": 42.991539001464844, - "learning_rate": 1.3095111111111112e-05, - "loss": 1.6522, - "step": 25670 - }, - { - "epoch": 0.82176, - "grad_norm": 46.60944366455078, - "learning_rate": 1.3091555555555558e-05, - "loss": 1.6269, - "step": 25680 - }, - { - "epoch": 0.82208, - "grad_norm": 44.123817443847656, - "learning_rate": 1.3088e-05, - "loss": 1.6325, - "step": 25690 - }, - { - "epoch": 0.8224, - "grad_norm": 44.38522720336914, - "learning_rate": 1.3084444444444447e-05, - "loss": 1.6804, - "step": 25700 - }, - { - "epoch": 0.82272, - "grad_norm": 44.40520477294922, - "learning_rate": 1.308088888888889e-05, - "loss": 1.6397, - "step": 25710 - }, - { - "epoch": 0.82304, - "grad_norm": 45.01036834716797, - "learning_rate": 1.3077333333333334e-05, - "loss": 1.6629, - "step": 25720 - }, - { - "epoch": 0.82336, - "grad_norm": 43.612449645996094, - "learning_rate": 1.3073777777777779e-05, - "loss": 1.6594, - "step": 25730 - }, - { - "epoch": 0.82368, - "grad_norm": 46.03797912597656, - "learning_rate": 1.3070222222222223e-05, - "loss": 1.6797, - "step": 25740 - }, - { - "epoch": 0.824, - "grad_norm": 42.819480895996094, - "learning_rate": 1.3066666666666668e-05, - "loss": 1.6234, - "step": 25750 - }, - { - "epoch": 0.82432, - "grad_norm": 45.527652740478516, - "learning_rate": 1.3063111111111113e-05, - "loss": 1.6523, - "step": 25760 - }, - { - "epoch": 0.82464, - "grad_norm": 45.140602111816406, - "learning_rate": 1.3059555555555557e-05, - "loss": 1.6684, - "step": 25770 - }, - { - "epoch": 0.82496, - "grad_norm": 43.334163665771484, - "learning_rate": 1.3056000000000002e-05, - "loss": 1.6393, - "step": 25780 - }, - { - "epoch": 0.82528, - "grad_norm": 44.33065414428711, - "learning_rate": 1.3052444444444445e-05, - "loss": 1.6711, - "step": 25790 - }, - { - "epoch": 0.8256, - "grad_norm": 43.01241683959961, - "learning_rate": 1.304888888888889e-05, - "loss": 1.6545, - "step": 25800 - }, - { - "epoch": 0.82592, - "grad_norm": 45.725337982177734, - "learning_rate": 1.3045333333333334e-05, - "loss": 1.6609, - "step": 25810 - }, - { - "epoch": 0.82624, - "grad_norm": 42.712215423583984, - "learning_rate": 1.304177777777778e-05, - "loss": 1.6755, - "step": 25820 - }, - { - "epoch": 0.82656, - "grad_norm": 45.29679489135742, - "learning_rate": 1.3038222222222223e-05, - "loss": 1.6409, - "step": 25830 - }, - { - "epoch": 0.82688, - "grad_norm": 44.52570724487305, - "learning_rate": 1.3034666666666669e-05, - "loss": 1.6313, - "step": 25840 - }, - { - "epoch": 0.8272, - "grad_norm": 44.2238883972168, - "learning_rate": 1.3031111111111112e-05, - "loss": 1.6631, - "step": 25850 - }, - { - "epoch": 0.82752, - "grad_norm": 
42.84789276123047, - "learning_rate": 1.3027555555555558e-05, - "loss": 1.6233, - "step": 25860 - }, - { - "epoch": 0.82784, - "grad_norm": 43.984310150146484, - "learning_rate": 1.3024000000000001e-05, - "loss": 1.6362, - "step": 25870 - }, - { - "epoch": 0.82816, - "grad_norm": 42.9167594909668, - "learning_rate": 1.3020444444444444e-05, - "loss": 1.6166, - "step": 25880 - }, - { - "epoch": 0.82848, - "grad_norm": 44.87565612792969, - "learning_rate": 1.301688888888889e-05, - "loss": 1.6503, - "step": 25890 - }, - { - "epoch": 0.8288, - "grad_norm": 45.01673889160156, - "learning_rate": 1.3013333333333333e-05, - "loss": 1.6494, - "step": 25900 - }, - { - "epoch": 0.82912, - "grad_norm": 43.40441131591797, - "learning_rate": 1.300977777777778e-05, - "loss": 1.673, - "step": 25910 - }, - { - "epoch": 0.82944, - "grad_norm": 45.419349670410156, - "learning_rate": 1.3006222222222222e-05, - "loss": 1.6564, - "step": 25920 - }, - { - "epoch": 0.82976, - "grad_norm": 44.7979621887207, - "learning_rate": 1.3002666666666668e-05, - "loss": 1.6677, - "step": 25930 - }, - { - "epoch": 0.83008, - "grad_norm": 45.44395446777344, - "learning_rate": 1.2999111111111111e-05, - "loss": 1.6384, - "step": 25940 - }, - { - "epoch": 0.8304, - "grad_norm": 44.90794372558594, - "learning_rate": 1.2995555555555557e-05, - "loss": 1.6625, - "step": 25950 - }, - { - "epoch": 0.83072, - "grad_norm": 44.52241897583008, - "learning_rate": 1.2992e-05, - "loss": 1.6542, - "step": 25960 - }, - { - "epoch": 0.83104, - "grad_norm": 44.95444107055664, - "learning_rate": 1.2988444444444447e-05, - "loss": 1.6471, - "step": 25970 - }, - { - "epoch": 0.83136, - "grad_norm": 45.035301208496094, - "learning_rate": 1.298488888888889e-05, - "loss": 1.6533, - "step": 25980 - }, - { - "epoch": 0.83168, - "grad_norm": 45.0323371887207, - "learning_rate": 1.2981333333333334e-05, - "loss": 1.6419, - "step": 25990 - }, - { - "epoch": 0.832, - "grad_norm": 46.45552444458008, - "learning_rate": 1.2977777777777779e-05, - "loss": 1.6587, - "step": 26000 - }, - { - "epoch": 0.83232, - "grad_norm": 46.57162094116211, - "learning_rate": 1.2974222222222223e-05, - "loss": 1.6624, - "step": 26010 - }, - { - "epoch": 0.83264, - "grad_norm": 45.604042053222656, - "learning_rate": 1.2970666666666668e-05, - "loss": 1.6699, - "step": 26020 - }, - { - "epoch": 0.83296, - "grad_norm": 45.68147277832031, - "learning_rate": 1.2967111111111112e-05, - "loss": 1.6642, - "step": 26030 - }, - { - "epoch": 0.83328, - "grad_norm": 44.60567092895508, - "learning_rate": 1.2963555555555557e-05, - "loss": 1.6366, - "step": 26040 - }, - { - "epoch": 0.8336, - "grad_norm": 42.047977447509766, - "learning_rate": 1.2960000000000001e-05, - "loss": 1.6321, - "step": 26050 - }, - { - "epoch": 0.83392, - "grad_norm": 43.56084060668945, - "learning_rate": 1.2956444444444444e-05, - "loss": 1.66, - "step": 26060 - }, - { - "epoch": 0.83424, - "grad_norm": 45.93843460083008, - "learning_rate": 1.295288888888889e-05, - "loss": 1.6546, - "step": 26070 - }, - { - "epoch": 0.83456, - "grad_norm": 43.66746139526367, - "learning_rate": 1.2949333333333333e-05, - "loss": 1.631, - "step": 26080 - }, - { - "epoch": 0.83488, - "grad_norm": 44.373802185058594, - "learning_rate": 1.294577777777778e-05, - "loss": 1.6222, - "step": 26090 - }, - { - "epoch": 0.8352, - "grad_norm": 43.290977478027344, - "learning_rate": 1.2942222222222222e-05, - "loss": 1.6463, - "step": 26100 - }, - { - "epoch": 0.83552, - "grad_norm": 42.16154861450195, - "learning_rate": 1.2938666666666669e-05, - "loss": 
1.6314, - "step": 26110 - }, - { - "epoch": 0.83584, - "grad_norm": 45.15300369262695, - "learning_rate": 1.2935111111111111e-05, - "loss": 1.6622, - "step": 26120 - }, - { - "epoch": 0.83616, - "grad_norm": 45.746482849121094, - "learning_rate": 1.2931555555555558e-05, - "loss": 1.6552, - "step": 26130 - }, - { - "epoch": 0.83648, - "grad_norm": 44.604976654052734, - "learning_rate": 1.2928e-05, - "loss": 1.6379, - "step": 26140 - }, - { - "epoch": 0.8368, - "grad_norm": 45.78082275390625, - "learning_rate": 1.2924444444444447e-05, - "loss": 1.656, - "step": 26150 - }, - { - "epoch": 0.83712, - "grad_norm": 42.38752746582031, - "learning_rate": 1.292088888888889e-05, - "loss": 1.6713, - "step": 26160 - }, - { - "epoch": 0.83744, - "grad_norm": 41.761966705322266, - "learning_rate": 1.2917333333333334e-05, - "loss": 1.6581, - "step": 26170 - }, - { - "epoch": 0.83776, - "grad_norm": 44.03702926635742, - "learning_rate": 1.2913777777777779e-05, - "loss": 1.6545, - "step": 26180 - }, - { - "epoch": 0.83808, - "grad_norm": 42.7269287109375, - "learning_rate": 1.2910222222222223e-05, - "loss": 1.6277, - "step": 26190 - }, - { - "epoch": 0.8384, - "grad_norm": 45.2806282043457, - "learning_rate": 1.2906666666666668e-05, - "loss": 1.6329, - "step": 26200 - }, - { - "epoch": 0.83872, - "grad_norm": 44.448692321777344, - "learning_rate": 1.2903111111111112e-05, - "loss": 1.6385, - "step": 26210 - }, - { - "epoch": 0.83904, - "grad_norm": 43.17620849609375, - "learning_rate": 1.2899555555555557e-05, - "loss": 1.6309, - "step": 26220 - }, - { - "epoch": 0.83936, - "grad_norm": 43.542423248291016, - "learning_rate": 1.2896000000000002e-05, - "loss": 1.6282, - "step": 26230 - }, - { - "epoch": 0.83968, - "grad_norm": 46.048954010009766, - "learning_rate": 1.2892444444444444e-05, - "loss": 1.6884, - "step": 26240 - }, - { - "epoch": 0.84, - "grad_norm": 43.61574935913086, - "learning_rate": 1.288888888888889e-05, - "loss": 1.6829, - "step": 26250 - }, - { - "epoch": 0.84032, - "grad_norm": 42.595706939697266, - "learning_rate": 1.2885333333333334e-05, - "loss": 1.6519, - "step": 26260 - }, - { - "epoch": 0.84064, - "grad_norm": 42.97865676879883, - "learning_rate": 1.288177777777778e-05, - "loss": 1.6544, - "step": 26270 - }, - { - "epoch": 0.84096, - "grad_norm": 47.38331604003906, - "learning_rate": 1.2878222222222223e-05, - "loss": 1.6439, - "step": 26280 - }, - { - "epoch": 0.84128, - "grad_norm": 45.50075149536133, - "learning_rate": 1.2874666666666669e-05, - "loss": 1.6359, - "step": 26290 - }, - { - "epoch": 0.8416, - "grad_norm": 44.4566764831543, - "learning_rate": 1.2871111111111112e-05, - "loss": 1.6667, - "step": 26300 - }, - { - "epoch": 0.84192, - "grad_norm": 43.335777282714844, - "learning_rate": 1.2867555555555558e-05, - "loss": 1.6672, - "step": 26310 - }, - { - "epoch": 0.84224, - "grad_norm": 47.7451171875, - "learning_rate": 1.2864e-05, - "loss": 1.6576, - "step": 26320 - }, - { - "epoch": 0.84256, - "grad_norm": 46.96812057495117, - "learning_rate": 1.2860444444444447e-05, - "loss": 1.6441, - "step": 26330 - }, - { - "epoch": 0.84288, - "grad_norm": 45.21785354614258, - "learning_rate": 1.285688888888889e-05, - "loss": 1.6437, - "step": 26340 - }, - { - "epoch": 0.8432, - "grad_norm": 41.97297668457031, - "learning_rate": 1.2853333333333336e-05, - "loss": 1.6436, - "step": 26350 - }, - { - "epoch": 0.84352, - "grad_norm": 44.169273376464844, - "learning_rate": 1.2849777777777779e-05, - "loss": 1.6387, - "step": 26360 - }, - { - "epoch": 0.84384, - "grad_norm": 45.00596618652344, - 
"learning_rate": 1.2846222222222224e-05, - "loss": 1.6272, - "step": 26370 - }, - { - "epoch": 0.84416, - "grad_norm": 45.01812744140625, - "learning_rate": 1.2842666666666668e-05, - "loss": 1.6506, - "step": 26380 - }, - { - "epoch": 0.84448, - "grad_norm": 44.44124984741211, - "learning_rate": 1.2839111111111111e-05, - "loss": 1.6627, - "step": 26390 - }, - { - "epoch": 0.8448, - "grad_norm": 45.47779846191406, - "learning_rate": 1.2835555555555557e-05, - "loss": 1.6626, - "step": 26400 - }, - { - "epoch": 0.84512, - "grad_norm": 46.43983459472656, - "learning_rate": 1.2832e-05, - "loss": 1.6717, - "step": 26410 - }, - { - "epoch": 0.84544, - "grad_norm": 45.822357177734375, - "learning_rate": 1.2828444444444446e-05, - "loss": 1.6639, - "step": 26420 - }, - { - "epoch": 0.84576, - "grad_norm": 45.2042121887207, - "learning_rate": 1.282488888888889e-05, - "loss": 1.6461, - "step": 26430 - }, - { - "epoch": 0.84608, - "grad_norm": 44.78562927246094, - "learning_rate": 1.2821333333333334e-05, - "loss": 1.6853, - "step": 26440 - }, - { - "epoch": 0.8464, - "grad_norm": 43.18932342529297, - "learning_rate": 1.2817777777777778e-05, - "loss": 1.6753, - "step": 26450 - }, - { - "epoch": 0.84672, - "grad_norm": 43.83230209350586, - "learning_rate": 1.2814222222222223e-05, - "loss": 1.6618, - "step": 26460 - }, - { - "epoch": 0.84704, - "grad_norm": 45.577362060546875, - "learning_rate": 1.2810666666666667e-05, - "loss": 1.6194, - "step": 26470 - }, - { - "epoch": 0.84736, - "grad_norm": 46.04071807861328, - "learning_rate": 1.2807111111111112e-05, - "loss": 1.6443, - "step": 26480 - }, - { - "epoch": 0.84768, - "grad_norm": 44.70589828491211, - "learning_rate": 1.2803555555555557e-05, - "loss": 1.6251, - "step": 26490 - }, - { - "epoch": 0.848, - "grad_norm": 44.009803771972656, - "learning_rate": 1.2800000000000001e-05, - "loss": 1.6241, - "step": 26500 - }, - { - "epoch": 0.84832, - "grad_norm": 43.644805908203125, - "learning_rate": 1.2796444444444444e-05, - "loss": 1.6336, - "step": 26510 - }, - { - "epoch": 0.84864, - "grad_norm": 45.69171142578125, - "learning_rate": 1.279288888888889e-05, - "loss": 1.6607, - "step": 26520 - }, - { - "epoch": 0.84896, - "grad_norm": 43.19560241699219, - "learning_rate": 1.2789333333333333e-05, - "loss": 1.6603, - "step": 26530 - }, - { - "epoch": 0.84928, - "grad_norm": 44.87495422363281, - "learning_rate": 1.278577777777778e-05, - "loss": 1.6498, - "step": 26540 - }, - { - "epoch": 0.8496, - "grad_norm": 42.37331008911133, - "learning_rate": 1.2782222222222222e-05, - "loss": 1.6671, - "step": 26550 - }, - { - "epoch": 0.84992, - "grad_norm": 43.74976348876953, - "learning_rate": 1.2778666666666668e-05, - "loss": 1.6441, - "step": 26560 - }, - { - "epoch": 0.85024, - "grad_norm": 42.69112777709961, - "learning_rate": 1.2775111111111111e-05, - "loss": 1.6505, - "step": 26570 - }, - { - "epoch": 0.85056, - "grad_norm": 43.077308654785156, - "learning_rate": 1.2771555555555558e-05, - "loss": 1.6585, - "step": 26580 - }, - { - "epoch": 0.85088, - "grad_norm": 44.00758361816406, - "learning_rate": 1.2768e-05, - "loss": 1.6302, - "step": 26590 - }, - { - "epoch": 0.8512, - "grad_norm": 45.46588134765625, - "learning_rate": 1.2764444444444447e-05, - "loss": 1.6416, - "step": 26600 - }, - { - "epoch": 0.85152, - "grad_norm": 44.98797607421875, - "learning_rate": 1.276088888888889e-05, - "loss": 1.667, - "step": 26610 - }, - { - "epoch": 0.85184, - "grad_norm": 46.968017578125, - "learning_rate": 1.2757333333333334e-05, - "loss": 1.6459, - "step": 26620 - }, - { - 
"epoch": 0.85216, - "grad_norm": 43.3302001953125, - "learning_rate": 1.2753777777777779e-05, - "loss": 1.6834, - "step": 26630 - }, - { - "epoch": 0.85248, - "grad_norm": 45.77449417114258, - "learning_rate": 1.2750222222222223e-05, - "loss": 1.673, - "step": 26640 - }, - { - "epoch": 0.8528, - "grad_norm": 44.07681655883789, - "learning_rate": 1.2746666666666668e-05, - "loss": 1.6479, - "step": 26650 - }, - { - "epoch": 0.85312, - "grad_norm": 44.1185188293457, - "learning_rate": 1.2743111111111112e-05, - "loss": 1.6679, - "step": 26660 - }, - { - "epoch": 0.85344, - "grad_norm": 45.55058670043945, - "learning_rate": 1.2739555555555557e-05, - "loss": 1.6729, - "step": 26670 - }, - { - "epoch": 0.85376, - "grad_norm": 44.186737060546875, - "learning_rate": 1.2736000000000001e-05, - "loss": 1.659, - "step": 26680 - }, - { - "epoch": 0.85408, - "grad_norm": 43.83502197265625, - "learning_rate": 1.2732444444444444e-05, - "loss": 1.668, - "step": 26690 - }, - { - "epoch": 0.8544, - "grad_norm": 44.22084045410156, - "learning_rate": 1.272888888888889e-05, - "loss": 1.6527, - "step": 26700 - }, - { - "epoch": 0.85472, - "grad_norm": 46.551456451416016, - "learning_rate": 1.2725333333333333e-05, - "loss": 1.6546, - "step": 26710 - }, - { - "epoch": 0.85504, - "grad_norm": 45.00933074951172, - "learning_rate": 1.272177777777778e-05, - "loss": 1.624, - "step": 26720 - }, - { - "epoch": 0.85536, - "grad_norm": 43.585227966308594, - "learning_rate": 1.2718222222222222e-05, - "loss": 1.6377, - "step": 26730 - }, - { - "epoch": 0.85568, - "grad_norm": 44.3875846862793, - "learning_rate": 1.2714666666666669e-05, - "loss": 1.6469, - "step": 26740 - }, - { - "epoch": 0.856, - "grad_norm": 42.42379379272461, - "learning_rate": 1.2711111111111112e-05, - "loss": 1.628, - "step": 26750 - }, - { - "epoch": 0.85632, - "grad_norm": 43.73078918457031, - "learning_rate": 1.2707555555555558e-05, - "loss": 1.6491, - "step": 26760 - }, - { - "epoch": 0.85664, - "grad_norm": 42.43165969848633, - "learning_rate": 1.2704e-05, - "loss": 1.6496, - "step": 26770 - }, - { - "epoch": 0.85696, - "grad_norm": 45.21994400024414, - "learning_rate": 1.2700444444444447e-05, - "loss": 1.662, - "step": 26780 - }, - { - "epoch": 0.85728, - "grad_norm": 43.44044876098633, - "learning_rate": 1.269688888888889e-05, - "loss": 1.6649, - "step": 26790 - }, - { - "epoch": 0.8576, - "grad_norm": 45.463809967041016, - "learning_rate": 1.2693333333333336e-05, - "loss": 1.6547, - "step": 26800 - }, - { - "epoch": 0.85792, - "grad_norm": 47.156829833984375, - "learning_rate": 1.2689777777777779e-05, - "loss": 1.6499, - "step": 26810 - }, - { - "epoch": 0.85824, - "grad_norm": 44.708038330078125, - "learning_rate": 1.2686222222222223e-05, - "loss": 1.6574, - "step": 26820 - }, - { - "epoch": 0.85856, - "grad_norm": 45.35110855102539, - "learning_rate": 1.2682666666666668e-05, - "loss": 1.6368, - "step": 26830 - }, - { - "epoch": 0.85888, - "grad_norm": 43.904296875, - "learning_rate": 1.2679111111111113e-05, - "loss": 1.6353, - "step": 26840 - }, - { - "epoch": 0.8592, - "grad_norm": 46.68516159057617, - "learning_rate": 1.2675555555555557e-05, - "loss": 1.6486, - "step": 26850 - }, - { - "epoch": 0.85952, - "grad_norm": 44.25490951538086, - "learning_rate": 1.2672000000000002e-05, - "loss": 1.6474, - "step": 26860 - }, - { - "epoch": 0.85984, - "grad_norm": 43.81814193725586, - "learning_rate": 1.2668444444444446e-05, - "loss": 1.6599, - "step": 26870 - }, - { - "epoch": 0.86016, - "grad_norm": 44.363643646240234, - "learning_rate": 
1.266488888888889e-05, - "loss": 1.6494, - "step": 26880 - }, - { - "epoch": 0.86048, - "grad_norm": 45.49546432495117, - "learning_rate": 1.2661333333333334e-05, - "loss": 1.6594, - "step": 26890 - }, - { - "epoch": 0.8608, - "grad_norm": 41.69033432006836, - "learning_rate": 1.2657777777777778e-05, - "loss": 1.6266, - "step": 26900 - }, - { - "epoch": 0.86112, - "grad_norm": 43.465553283691406, - "learning_rate": 1.2654222222222223e-05, - "loss": 1.6648, - "step": 26910 - }, - { - "epoch": 0.86144, - "grad_norm": 44.84959030151367, - "learning_rate": 1.2650666666666667e-05, - "loss": 1.6632, - "step": 26920 - }, - { - "epoch": 0.86176, - "grad_norm": 43.108482360839844, - "learning_rate": 1.2647111111111112e-05, - "loss": 1.668, - "step": 26930 - }, - { - "epoch": 0.86208, - "grad_norm": 42.777320861816406, - "learning_rate": 1.2643555555555556e-05, - "loss": 1.656, - "step": 26940 - }, - { - "epoch": 0.8624, - "grad_norm": 44.51447296142578, - "learning_rate": 1.2640000000000001e-05, - "loss": 1.6395, - "step": 26950 - }, - { - "epoch": 0.86272, - "grad_norm": 47.7524299621582, - "learning_rate": 1.2636444444444444e-05, - "loss": 1.6489, - "step": 26960 - }, - { - "epoch": 0.86304, - "grad_norm": 42.68575668334961, - "learning_rate": 1.263288888888889e-05, - "loss": 1.6581, - "step": 26970 - }, - { - "epoch": 0.86336, - "grad_norm": 44.79243087768555, - "learning_rate": 1.2629333333333333e-05, - "loss": 1.6275, - "step": 26980 - }, - { - "epoch": 0.86368, - "grad_norm": 45.371253967285156, - "learning_rate": 1.262577777777778e-05, - "loss": 1.6663, - "step": 26990 - }, - { - "epoch": 0.864, - "grad_norm": 45.898128509521484, - "learning_rate": 1.2622222222222222e-05, - "loss": 1.6425, - "step": 27000 - }, - { - "epoch": 0.86432, - "grad_norm": 42.76716613769531, - "learning_rate": 1.2618666666666668e-05, - "loss": 1.6436, - "step": 27010 - }, - { - "epoch": 0.86464, - "grad_norm": 45.81767272949219, - "learning_rate": 1.2615111111111111e-05, - "loss": 1.6599, - "step": 27020 - }, - { - "epoch": 0.86496, - "grad_norm": 41.93647384643555, - "learning_rate": 1.2611555555555557e-05, - "loss": 1.6526, - "step": 27030 - }, - { - "epoch": 0.86528, - "grad_norm": 43.41840744018555, - "learning_rate": 1.2608e-05, - "loss": 1.6721, - "step": 27040 - }, - { - "epoch": 0.8656, - "grad_norm": 44.652591705322266, - "learning_rate": 1.2604444444444446e-05, - "loss": 1.6321, - "step": 27050 - }, - { - "epoch": 0.86592, - "grad_norm": 43.17992401123047, - "learning_rate": 1.260088888888889e-05, - "loss": 1.659, - "step": 27060 - }, - { - "epoch": 0.86624, - "grad_norm": 45.41444778442383, - "learning_rate": 1.2597333333333334e-05, - "loss": 1.6524, - "step": 27070 - }, - { - "epoch": 0.86656, - "grad_norm": 44.880409240722656, - "learning_rate": 1.2593777777777778e-05, - "loss": 1.6639, - "step": 27080 - }, - { - "epoch": 0.86688, - "grad_norm": 42.21232986450195, - "learning_rate": 1.2590222222222223e-05, - "loss": 1.6627, - "step": 27090 - }, - { - "epoch": 0.8672, - "grad_norm": 46.4969482421875, - "learning_rate": 1.2586666666666668e-05, - "loss": 1.6546, - "step": 27100 - }, - { - "epoch": 0.86752, - "grad_norm": 42.779335021972656, - "learning_rate": 1.2583111111111112e-05, - "loss": 1.6112, - "step": 27110 - }, - { - "epoch": 0.86784, - "grad_norm": 44.317108154296875, - "learning_rate": 1.2579555555555557e-05, - "loss": 1.6417, - "step": 27120 - }, - { - "epoch": 0.86816, - "grad_norm": 44.933109283447266, - "learning_rate": 1.2576000000000001e-05, - "loss": 1.6624, - "step": 27130 - }, - { - 
"epoch": 0.86848, - "grad_norm": 43.719783782958984, - "learning_rate": 1.2572444444444444e-05, - "loss": 1.6601, - "step": 27140 - }, - { - "epoch": 0.8688, - "grad_norm": 44.277103424072266, - "learning_rate": 1.256888888888889e-05, - "loss": 1.6492, - "step": 27150 - }, - { - "epoch": 0.86912, - "grad_norm": 43.31966781616211, - "learning_rate": 1.2565333333333333e-05, - "loss": 1.626, - "step": 27160 - }, - { - "epoch": 0.86944, - "grad_norm": 43.45155715942383, - "learning_rate": 1.256177777777778e-05, - "loss": 1.6536, - "step": 27170 - }, - { - "epoch": 0.86976, - "grad_norm": 43.08216094970703, - "learning_rate": 1.2558222222222222e-05, - "loss": 1.6415, - "step": 27180 - }, - { - "epoch": 0.87008, - "grad_norm": 44.816810607910156, - "learning_rate": 1.2554666666666669e-05, - "loss": 1.6711, - "step": 27190 - }, - { - "epoch": 0.8704, - "grad_norm": 44.163211822509766, - "learning_rate": 1.2551111111111111e-05, - "loss": 1.6593, - "step": 27200 - }, - { - "epoch": 0.87072, - "grad_norm": 47.842308044433594, - "learning_rate": 1.2547555555555558e-05, - "loss": 1.6535, - "step": 27210 - }, - { - "epoch": 0.87104, - "grad_norm": 43.351219177246094, - "learning_rate": 1.2544e-05, - "loss": 1.6299, - "step": 27220 - }, - { - "epoch": 0.87136, - "grad_norm": 44.729652404785156, - "learning_rate": 1.2540444444444447e-05, - "loss": 1.658, - "step": 27230 - }, - { - "epoch": 0.87168, - "grad_norm": 42.45663070678711, - "learning_rate": 1.253688888888889e-05, - "loss": 1.6354, - "step": 27240 - }, - { - "epoch": 0.872, - "grad_norm": 44.611331939697266, - "learning_rate": 1.2533333333333336e-05, - "loss": 1.6364, - "step": 27250 - }, - { - "epoch": 0.87232, - "grad_norm": 43.72536849975586, - "learning_rate": 1.2529777777777779e-05, - "loss": 1.6597, - "step": 27260 - }, - { - "epoch": 0.87264, - "grad_norm": 44.238258361816406, - "learning_rate": 1.2526222222222223e-05, - "loss": 1.6638, - "step": 27270 - }, - { - "epoch": 0.87296, - "grad_norm": 44.001956939697266, - "learning_rate": 1.2522666666666668e-05, - "loss": 1.6426, - "step": 27280 - }, - { - "epoch": 0.87328, - "grad_norm": 49.65640640258789, - "learning_rate": 1.2519111111111112e-05, - "loss": 1.6433, - "step": 27290 - }, - { - "epoch": 0.8736, - "grad_norm": 46.28944778442383, - "learning_rate": 1.2515555555555557e-05, - "loss": 1.662, - "step": 27300 - }, - { - "epoch": 0.87392, - "grad_norm": 44.71233367919922, - "learning_rate": 1.2512000000000002e-05, - "loss": 1.658, - "step": 27310 - }, - { - "epoch": 0.87424, - "grad_norm": 43.2108039855957, - "learning_rate": 1.2508444444444446e-05, - "loss": 1.6548, - "step": 27320 - }, - { - "epoch": 0.87456, - "grad_norm": 42.10308837890625, - "learning_rate": 1.250488888888889e-05, - "loss": 1.6557, - "step": 27330 - }, - { - "epoch": 0.87488, - "grad_norm": 46.05120849609375, - "learning_rate": 1.2501333333333333e-05, - "loss": 1.6751, - "step": 27340 - }, - { - "epoch": 0.8752, - "grad_norm": 42.84408950805664, - "learning_rate": 1.249777777777778e-05, - "loss": 1.6356, - "step": 27350 - }, - { - "epoch": 0.87552, - "grad_norm": 45.129425048828125, - "learning_rate": 1.2494222222222223e-05, - "loss": 1.6177, - "step": 27360 - }, - { - "epoch": 0.87584, - "grad_norm": 46.16420364379883, - "learning_rate": 1.2490666666666669e-05, - "loss": 1.6488, - "step": 27370 - }, - { - "epoch": 0.87616, - "grad_norm": 43.65485763549805, - "learning_rate": 1.2487111111111112e-05, - "loss": 1.6263, - "step": 27380 - }, - { - "epoch": 0.87648, - "grad_norm": 42.65005111694336, - "learning_rate": 
1.2483555555555558e-05, - "loss": 1.6233, - "step": 27390 - }, - { - "epoch": 0.8768, - "grad_norm": 43.636959075927734, - "learning_rate": 1.248e-05, - "loss": 1.6207, - "step": 27400 - }, - { - "epoch": 0.87712, - "grad_norm": 43.3430290222168, - "learning_rate": 1.2476444444444444e-05, - "loss": 1.6531, - "step": 27410 - }, - { - "epoch": 0.87744, - "grad_norm": 44.20934295654297, - "learning_rate": 1.247288888888889e-05, - "loss": 1.6367, - "step": 27420 - }, - { - "epoch": 0.87776, - "grad_norm": 45.244789123535156, - "learning_rate": 1.2469333333333333e-05, - "loss": 1.6685, - "step": 27430 - }, - { - "epoch": 0.87808, - "grad_norm": 44.35482406616211, - "learning_rate": 1.2465777777777779e-05, - "loss": 1.645, - "step": 27440 - }, - { - "epoch": 0.8784, - "grad_norm": 46.52604675292969, - "learning_rate": 1.2462222222222222e-05, - "loss": 1.6398, - "step": 27450 - }, - { - "epoch": 0.87872, - "grad_norm": 44.6185302734375, - "learning_rate": 1.2458666666666668e-05, - "loss": 1.6597, - "step": 27460 - }, - { - "epoch": 0.87904, - "grad_norm": 42.396846771240234, - "learning_rate": 1.2455111111111111e-05, - "loss": 1.6788, - "step": 27470 - }, - { - "epoch": 0.87936, - "grad_norm": 45.505287170410156, - "learning_rate": 1.2451555555555557e-05, - "loss": 1.618, - "step": 27480 - }, - { - "epoch": 0.87968, - "grad_norm": 43.372291564941406, - "learning_rate": 1.2448e-05, - "loss": 1.6508, - "step": 27490 - }, - { - "epoch": 0.88, - "grad_norm": 42.66139221191406, - "learning_rate": 1.2444444444444446e-05, - "loss": 1.6607, - "step": 27500 - }, - { - "epoch": 0.88032, - "grad_norm": 44.73249435424805, - "learning_rate": 1.244088888888889e-05, - "loss": 1.6458, - "step": 27510 - }, - { - "epoch": 0.88064, - "grad_norm": 44.99702072143555, - "learning_rate": 1.2437333333333335e-05, - "loss": 1.6289, - "step": 27520 - }, - { - "epoch": 0.88096, - "grad_norm": 42.213645935058594, - "learning_rate": 1.2433777777777778e-05, - "loss": 1.6171, - "step": 27530 - }, - { - "epoch": 0.88128, - "grad_norm": 44.95713806152344, - "learning_rate": 1.2430222222222223e-05, - "loss": 1.6766, - "step": 27540 - }, - { - "epoch": 0.8816, - "grad_norm": 44.07640075683594, - "learning_rate": 1.2426666666666667e-05, - "loss": 1.6444, - "step": 27550 - }, - { - "epoch": 0.88192, - "grad_norm": 42.04390335083008, - "learning_rate": 1.2423111111111112e-05, - "loss": 1.6551, - "step": 27560 - }, - { - "epoch": 0.88224, - "grad_norm": 44.98395919799805, - "learning_rate": 1.2419555555555557e-05, - "loss": 1.6356, - "step": 27570 - }, - { - "epoch": 0.88256, - "grad_norm": 46.020111083984375, - "learning_rate": 1.2416000000000001e-05, - "loss": 1.6309, - "step": 27580 - }, - { - "epoch": 0.88288, - "grad_norm": 42.20390701293945, - "learning_rate": 1.2412444444444444e-05, - "loss": 1.6362, - "step": 27590 - }, - { - "epoch": 0.8832, - "grad_norm": 44.44026565551758, - "learning_rate": 1.240888888888889e-05, - "loss": 1.6688, - "step": 27600 - }, - { - "epoch": 0.88352, - "grad_norm": 45.78468322753906, - "learning_rate": 1.2405333333333333e-05, - "loss": 1.6402, - "step": 27610 - }, - { - "epoch": 0.88384, - "grad_norm": 43.677391052246094, - "learning_rate": 1.240177777777778e-05, - "loss": 1.6265, - "step": 27620 - }, - { - "epoch": 0.88416, - "grad_norm": 43.043800354003906, - "learning_rate": 1.2398222222222222e-05, - "loss": 1.6182, - "step": 27630 - }, - { - "epoch": 0.88448, - "grad_norm": 44.14324951171875, - "learning_rate": 1.2394666666666668e-05, - "loss": 1.6267, - "step": 27640 - }, - { - "epoch": 0.8848, 
- "grad_norm": 44.18344497680664, - "learning_rate": 1.2391111111111111e-05, - "loss": 1.6391, - "step": 27650 - }, - { - "epoch": 0.88512, - "grad_norm": 43.16364288330078, - "learning_rate": 1.2387555555555557e-05, - "loss": 1.6655, - "step": 27660 - }, - { - "epoch": 0.88544, - "grad_norm": 46.12438201904297, - "learning_rate": 1.2384e-05, - "loss": 1.6428, - "step": 27670 - }, - { - "epoch": 0.88576, - "grad_norm": 45.85373306274414, - "learning_rate": 1.2380444444444447e-05, - "loss": 1.6736, - "step": 27680 - }, - { - "epoch": 0.88608, - "grad_norm": 41.845664978027344, - "learning_rate": 1.237688888888889e-05, - "loss": 1.6536, - "step": 27690 - }, - { - "epoch": 0.8864, - "grad_norm": 43.84564208984375, - "learning_rate": 1.2373333333333336e-05, - "loss": 1.632, - "step": 27700 - }, - { - "epoch": 0.88672, - "grad_norm": 45.331031799316406, - "learning_rate": 1.2369777777777779e-05, - "loss": 1.6414, - "step": 27710 - }, - { - "epoch": 0.88704, - "grad_norm": 46.10980987548828, - "learning_rate": 1.2366222222222223e-05, - "loss": 1.6285, - "step": 27720 - }, - { - "epoch": 0.88736, - "grad_norm": 43.144920349121094, - "learning_rate": 1.2362666666666668e-05, - "loss": 1.6452, - "step": 27730 - }, - { - "epoch": 0.88768, - "grad_norm": 45.01664352416992, - "learning_rate": 1.2359111111111112e-05, - "loss": 1.652, - "step": 27740 - }, - { - "epoch": 0.888, - "grad_norm": 44.60953140258789, - "learning_rate": 1.2355555555555557e-05, - "loss": 1.6461, - "step": 27750 - }, - { - "epoch": 0.88832, - "grad_norm": 43.47231674194336, - "learning_rate": 1.2352000000000001e-05, - "loss": 1.6385, - "step": 27760 - }, - { - "epoch": 0.88864, - "grad_norm": 43.14651107788086, - "learning_rate": 1.2348444444444446e-05, - "loss": 1.6667, - "step": 27770 - }, - { - "epoch": 0.88896, - "grad_norm": 44.596031188964844, - "learning_rate": 1.234488888888889e-05, - "loss": 1.6606, - "step": 27780 - }, - { - "epoch": 0.88928, - "grad_norm": 43.93633270263672, - "learning_rate": 1.2341333333333333e-05, - "loss": 1.6703, - "step": 27790 - }, - { - "epoch": 0.8896, - "grad_norm": 45.12252426147461, - "learning_rate": 1.233777777777778e-05, - "loss": 1.6845, - "step": 27800 - }, - { - "epoch": 0.88992, - "grad_norm": 42.742916107177734, - "learning_rate": 1.2334222222222222e-05, - "loss": 1.6519, - "step": 27810 - }, - { - "epoch": 0.89024, - "grad_norm": 42.931983947753906, - "learning_rate": 1.2330666666666669e-05, - "loss": 1.6157, - "step": 27820 - }, - { - "epoch": 0.89056, - "grad_norm": 45.96379852294922, - "learning_rate": 1.2327111111111112e-05, - "loss": 1.659, - "step": 27830 - }, - { - "epoch": 0.89088, - "grad_norm": 43.690372467041016, - "learning_rate": 1.2323555555555558e-05, - "loss": 1.6571, - "step": 27840 - }, - { - "epoch": 0.8912, - "grad_norm": 43.9526252746582, - "learning_rate": 1.232e-05, - "loss": 1.6165, - "step": 27850 - }, - { - "epoch": 0.89152, - "grad_norm": 44.779090881347656, - "learning_rate": 1.2316444444444447e-05, - "loss": 1.6393, - "step": 27860 - }, - { - "epoch": 0.89184, - "grad_norm": 44.47468185424805, - "learning_rate": 1.231288888888889e-05, - "loss": 1.6405, - "step": 27870 - }, - { - "epoch": 0.89216, - "grad_norm": 43.439083099365234, - "learning_rate": 1.2309333333333336e-05, - "loss": 1.6563, - "step": 27880 - }, - { - "epoch": 0.89248, - "grad_norm": 43.92070770263672, - "learning_rate": 1.2305777777777779e-05, - "loss": 1.6624, - "step": 27890 - }, - { - "epoch": 0.8928, - "grad_norm": 44.40715408325195, - "learning_rate": 1.2302222222222225e-05, - 
"loss": 1.6531, - "step": 27900 - }, - { - "epoch": 0.89312, - "grad_norm": 44.94832229614258, - "learning_rate": 1.2298666666666668e-05, - "loss": 1.6473, - "step": 27910 - }, - { - "epoch": 0.89344, - "grad_norm": 44.329341888427734, - "learning_rate": 1.2295111111111113e-05, - "loss": 1.6501, - "step": 27920 - }, - { - "epoch": 0.89376, - "grad_norm": 44.398345947265625, - "learning_rate": 1.2291555555555557e-05, - "loss": 1.6479, - "step": 27930 - }, - { - "epoch": 0.89408, - "grad_norm": 42.575931549072266, - "learning_rate": 1.2288e-05, - "loss": 1.6516, - "step": 27940 - }, - { - "epoch": 0.8944, - "grad_norm": 46.583187103271484, - "learning_rate": 1.2284444444444446e-05, - "loss": 1.6903, - "step": 27950 - }, - { - "epoch": 0.89472, - "grad_norm": 44.53683853149414, - "learning_rate": 1.2280888888888889e-05, - "loss": 1.6201, - "step": 27960 - }, - { - "epoch": 0.89504, - "grad_norm": 47.129783630371094, - "learning_rate": 1.2277333333333335e-05, - "loss": 1.6381, - "step": 27970 - }, - { - "epoch": 0.89536, - "grad_norm": 44.966064453125, - "learning_rate": 1.2273777777777778e-05, - "loss": 1.6346, - "step": 27980 - }, - { - "epoch": 0.89568, - "grad_norm": 43.610904693603516, - "learning_rate": 1.2270222222222223e-05, - "loss": 1.6691, - "step": 27990 - }, - { - "epoch": 0.896, - "grad_norm": 44.72496032714844, - "learning_rate": 1.2266666666666667e-05, - "loss": 1.6484, - "step": 28000 - }, - { - "epoch": 0.89632, - "grad_norm": 43.81786346435547, - "learning_rate": 1.2263111111111112e-05, - "loss": 1.6518, - "step": 28010 - }, - { - "epoch": 0.89664, - "grad_norm": 41.949825286865234, - "learning_rate": 1.2259555555555556e-05, - "loss": 1.6408, - "step": 28020 - }, - { - "epoch": 0.89696, - "grad_norm": 45.50816345214844, - "learning_rate": 1.2256000000000001e-05, - "loss": 1.6762, - "step": 28030 - }, - { - "epoch": 0.89728, - "grad_norm": 44.04999923706055, - "learning_rate": 1.2252444444444444e-05, - "loss": 1.6387, - "step": 28040 - }, - { - "epoch": 0.8976, - "grad_norm": 43.951969146728516, - "learning_rate": 1.224888888888889e-05, - "loss": 1.6345, - "step": 28050 - }, - { - "epoch": 0.89792, - "grad_norm": 44.5511360168457, - "learning_rate": 1.2245333333333333e-05, - "loss": 1.5975, - "step": 28060 - }, - { - "epoch": 0.89824, - "grad_norm": 43.500633239746094, - "learning_rate": 1.2241777777777779e-05, - "loss": 1.6463, - "step": 28070 - }, - { - "epoch": 0.89856, - "grad_norm": 44.43678665161133, - "learning_rate": 1.2238222222222222e-05, - "loss": 1.6813, - "step": 28080 - }, - { - "epoch": 0.89888, - "grad_norm": 43.428714752197266, - "learning_rate": 1.2234666666666668e-05, - "loss": 1.6547, - "step": 28090 - }, - { - "epoch": 0.8992, - "grad_norm": 43.72435760498047, - "learning_rate": 1.2231111111111111e-05, - "loss": 1.621, - "step": 28100 - }, - { - "epoch": 0.89952, - "grad_norm": 43.099552154541016, - "learning_rate": 1.2227555555555557e-05, - "loss": 1.6399, - "step": 28110 - }, - { - "epoch": 0.89984, - "grad_norm": 42.11380386352539, - "learning_rate": 1.2224e-05, - "loss": 1.6225, - "step": 28120 - }, - { - "epoch": 0.90016, - "grad_norm": 42.5230827331543, - "learning_rate": 1.2220444444444446e-05, - "loss": 1.6506, - "step": 28130 - }, - { - "epoch": 0.90048, - "grad_norm": 44.5308952331543, - "learning_rate": 1.221688888888889e-05, - "loss": 1.6425, - "step": 28140 - }, - { - "epoch": 0.9008, - "grad_norm": 47.679595947265625, - "learning_rate": 1.2213333333333336e-05, - "loss": 1.6505, - "step": 28150 - }, - { - "epoch": 0.90112, - "grad_norm": 
42.81779861450195, - "learning_rate": 1.2209777777777778e-05, - "loss": 1.6382, - "step": 28160 - }, - { - "epoch": 0.90144, - "grad_norm": 43.218013763427734, - "learning_rate": 1.2206222222222223e-05, - "loss": 1.6254, - "step": 28170 - }, - { - "epoch": 0.90176, - "grad_norm": 44.04819869995117, - "learning_rate": 1.2202666666666668e-05, - "loss": 1.6533, - "step": 28180 - }, - { - "epoch": 0.90208, - "grad_norm": 45.29868698120117, - "learning_rate": 1.2199111111111112e-05, - "loss": 1.6509, - "step": 28190 - }, - { - "epoch": 0.9024, - "grad_norm": 42.42465591430664, - "learning_rate": 1.2195555555555557e-05, - "loss": 1.6312, - "step": 28200 - }, - { - "epoch": 0.90272, - "grad_norm": 44.77098846435547, - "learning_rate": 1.2192000000000001e-05, - "loss": 1.6445, - "step": 28210 - }, - { - "epoch": 0.90304, - "grad_norm": 43.591880798339844, - "learning_rate": 1.2188444444444446e-05, - "loss": 1.6475, - "step": 28220 - }, - { - "epoch": 0.90336, - "grad_norm": 42.93756103515625, - "learning_rate": 1.218488888888889e-05, - "loss": 1.6411, - "step": 28230 - }, - { - "epoch": 0.90368, - "grad_norm": 45.212730407714844, - "learning_rate": 1.2181333333333333e-05, - "loss": 1.6137, - "step": 28240 - }, - { - "epoch": 0.904, - "grad_norm": 44.45610809326172, - "learning_rate": 1.217777777777778e-05, - "loss": 1.6571, - "step": 28250 - }, - { - "epoch": 0.90432, - "grad_norm": 43.78285217285156, - "learning_rate": 1.2174222222222222e-05, - "loss": 1.6473, - "step": 28260 - }, - { - "epoch": 0.90464, - "grad_norm": 44.92477035522461, - "learning_rate": 1.2170666666666668e-05, - "loss": 1.6356, - "step": 28270 - }, - { - "epoch": 0.90496, - "grad_norm": 44.4495849609375, - "learning_rate": 1.2167111111111111e-05, - "loss": 1.6697, - "step": 28280 - }, - { - "epoch": 0.90528, - "grad_norm": 43.52287292480469, - "learning_rate": 1.2163555555555558e-05, - "loss": 1.6756, - "step": 28290 - }, - { - "epoch": 0.9056, - "grad_norm": 45.75019454956055, - "learning_rate": 1.216e-05, - "loss": 1.6648, - "step": 28300 - }, - { - "epoch": 0.90592, - "grad_norm": 42.70742416381836, - "learning_rate": 1.2156444444444447e-05, - "loss": 1.6568, - "step": 28310 - }, - { - "epoch": 0.90624, - "grad_norm": 46.65618896484375, - "learning_rate": 1.215288888888889e-05, - "loss": 1.6157, - "step": 28320 - }, - { - "epoch": 0.90656, - "grad_norm": 44.10770797729492, - "learning_rate": 1.2149333333333336e-05, - "loss": 1.6449, - "step": 28330 - }, - { - "epoch": 0.90688, - "grad_norm": 44.807430267333984, - "learning_rate": 1.2145777777777779e-05, - "loss": 1.6285, - "step": 28340 - }, - { - "epoch": 0.9072, - "grad_norm": 46.65182113647461, - "learning_rate": 1.2142222222222225e-05, - "loss": 1.617, - "step": 28350 - }, - { - "epoch": 0.90752, - "grad_norm": 42.64336013793945, - "learning_rate": 1.2138666666666668e-05, - "loss": 1.6355, - "step": 28360 - }, - { - "epoch": 0.90784, - "grad_norm": 43.93117904663086, - "learning_rate": 1.2135111111111112e-05, - "loss": 1.648, - "step": 28370 - }, - { - "epoch": 0.90816, - "grad_norm": 45.63738250732422, - "learning_rate": 1.2131555555555557e-05, - "loss": 1.6653, - "step": 28380 - }, - { - "epoch": 0.90848, - "grad_norm": 45.124847412109375, - "learning_rate": 1.2128000000000001e-05, - "loss": 1.6288, - "step": 28390 - }, - { - "epoch": 0.9088, - "grad_norm": 43.86760330200195, - "learning_rate": 1.2124444444444446e-05, - "loss": 1.6478, - "step": 28400 - }, - { - "epoch": 0.90912, - "grad_norm": 43.364009857177734, - "learning_rate": 1.212088888888889e-05, - "loss": 
1.6479, - "step": 28410 - }, - { - "epoch": 0.90944, - "grad_norm": 45.14009475708008, - "learning_rate": 1.2117333333333335e-05, - "loss": 1.6297, - "step": 28420 - }, - { - "epoch": 0.90976, - "grad_norm": 43.5945930480957, - "learning_rate": 1.211377777777778e-05, - "loss": 1.6466, - "step": 28430 - }, - { - "epoch": 0.91008, - "grad_norm": 43.39710998535156, - "learning_rate": 1.2110222222222223e-05, - "loss": 1.6827, - "step": 28440 - }, - { - "epoch": 0.9104, - "grad_norm": 43.625247955322266, - "learning_rate": 1.2106666666666667e-05, - "loss": 1.6457, - "step": 28450 - }, - { - "epoch": 0.91072, - "grad_norm": 43.71607971191406, - "learning_rate": 1.2103111111111112e-05, - "loss": 1.6719, - "step": 28460 - }, - { - "epoch": 0.91104, - "grad_norm": 43.85552215576172, - "learning_rate": 1.2099555555555556e-05, - "loss": 1.666, - "step": 28470 - }, - { - "epoch": 0.91136, - "grad_norm": 45.796871185302734, - "learning_rate": 1.2096e-05, - "loss": 1.6526, - "step": 28480 - }, - { - "epoch": 0.91168, - "grad_norm": 41.46237564086914, - "learning_rate": 1.2092444444444444e-05, - "loss": 1.6589, - "step": 28490 - }, - { - "epoch": 0.912, - "grad_norm": 46.35646438598633, - "learning_rate": 1.208888888888889e-05, - "loss": 1.6578, - "step": 28500 - }, - { - "epoch": 0.91232, - "grad_norm": 44.154850006103516, - "learning_rate": 1.2085333333333333e-05, - "loss": 1.622, - "step": 28510 - }, - { - "epoch": 0.91264, - "grad_norm": 45.82959747314453, - "learning_rate": 1.2081777777777779e-05, - "loss": 1.6247, - "step": 28520 - }, - { - "epoch": 0.91296, - "grad_norm": 44.784549713134766, - "learning_rate": 1.2078222222222222e-05, - "loss": 1.6267, - "step": 28530 - }, - { - "epoch": 0.91328, - "grad_norm": 98.08938598632812, - "learning_rate": 1.2074666666666668e-05, - "loss": 1.6385, - "step": 28540 - }, - { - "epoch": 0.9136, - "grad_norm": 45.04857635498047, - "learning_rate": 1.2071111111111111e-05, - "loss": 1.6629, - "step": 28550 - }, - { - "epoch": 0.91392, - "grad_norm": 45.639442443847656, - "learning_rate": 1.2067555555555557e-05, - "loss": 1.6443, - "step": 28560 - }, - { - "epoch": 0.91424, - "grad_norm": 44.02001190185547, - "learning_rate": 1.2064e-05, - "loss": 1.6545, - "step": 28570 - }, - { - "epoch": 0.91456, - "grad_norm": 44.048583984375, - "learning_rate": 1.2060444444444446e-05, - "loss": 1.6316, - "step": 28580 - }, - { - "epoch": 0.91488, - "grad_norm": 43.934024810791016, - "learning_rate": 1.2056888888888889e-05, - "loss": 1.6516, - "step": 28590 - }, - { - "epoch": 0.9152, - "grad_norm": 43.88330841064453, - "learning_rate": 1.2053333333333335e-05, - "loss": 1.6364, - "step": 28600 - }, - { - "epoch": 0.91552, - "grad_norm": 44.25266647338867, - "learning_rate": 1.2049777777777778e-05, - "loss": 1.6422, - "step": 28610 - }, - { - "epoch": 0.91584, - "grad_norm": 43.72382736206055, - "learning_rate": 1.2046222222222223e-05, - "loss": 1.6472, - "step": 28620 - }, - { - "epoch": 0.91616, - "grad_norm": 44.77427291870117, - "learning_rate": 1.2042666666666667e-05, - "loss": 1.6509, - "step": 28630 - }, - { - "epoch": 0.91648, - "grad_norm": 47.40642547607422, - "learning_rate": 1.2039111111111112e-05, - "loss": 1.6523, - "step": 28640 - }, - { - "epoch": 0.9168, - "grad_norm": 43.53240966796875, - "learning_rate": 1.2035555555555556e-05, - "loss": 1.6204, - "step": 28650 - }, - { - "epoch": 0.91712, - "grad_norm": 42.13488006591797, - "learning_rate": 1.2032000000000001e-05, - "loss": 1.6457, - "step": 28660 - }, - { - "epoch": 0.91744, - "grad_norm": 
46.60955047607422, - "learning_rate": 1.2028444444444446e-05, - "loss": 1.6273, - "step": 28670 - }, - { - "epoch": 0.91776, - "grad_norm": 45.47731399536133, - "learning_rate": 1.202488888888889e-05, - "loss": 1.6316, - "step": 28680 - }, - { - "epoch": 0.91808, - "grad_norm": 43.740535736083984, - "learning_rate": 1.2021333333333333e-05, - "loss": 1.6317, - "step": 28690 - }, - { - "epoch": 0.9184, - "grad_norm": 44.68436050415039, - "learning_rate": 1.201777777777778e-05, - "loss": 1.6506, - "step": 28700 - }, - { - "epoch": 0.91872, - "grad_norm": 44.82673263549805, - "learning_rate": 1.2014222222222222e-05, - "loss": 1.6467, - "step": 28710 - }, - { - "epoch": 0.91904, - "grad_norm": 41.92259979248047, - "learning_rate": 1.2010666666666668e-05, - "loss": 1.6017, - "step": 28720 - }, - { - "epoch": 0.91936, - "grad_norm": 44.171173095703125, - "learning_rate": 1.2007111111111111e-05, - "loss": 1.6213, - "step": 28730 - }, - { - "epoch": 0.91968, - "grad_norm": 45.21971130371094, - "learning_rate": 1.2003555555555557e-05, - "loss": 1.6214, - "step": 28740 - }, - { - "epoch": 0.92, - "grad_norm": 44.320556640625, - "learning_rate": 1.2e-05, - "loss": 1.6314, - "step": 28750 - }, - { - "epoch": 0.92032, - "grad_norm": 42.63776779174805, - "learning_rate": 1.1996444444444447e-05, - "loss": 1.6227, - "step": 28760 - }, - { - "epoch": 0.92064, - "grad_norm": 43.34054183959961, - "learning_rate": 1.199288888888889e-05, - "loss": 1.6382, - "step": 28770 - }, - { - "epoch": 0.92096, - "grad_norm": 43.49864196777344, - "learning_rate": 1.1989333333333336e-05, - "loss": 1.6106, - "step": 28780 - }, - { - "epoch": 0.92128, - "grad_norm": 43.339412689208984, - "learning_rate": 1.1985777777777779e-05, - "loss": 1.6352, - "step": 28790 - }, - { - "epoch": 0.9216, - "grad_norm": 44.452125549316406, - "learning_rate": 1.1982222222222225e-05, - "loss": 1.6247, - "step": 28800 - }, - { - "epoch": 0.92192, - "grad_norm": 44.37428283691406, - "learning_rate": 1.1978666666666668e-05, - "loss": 1.6457, - "step": 28810 - }, - { - "epoch": 0.92224, - "grad_norm": 44.22825622558594, - "learning_rate": 1.1975111111111112e-05, - "loss": 1.6493, - "step": 28820 - }, - { - "epoch": 0.92256, - "grad_norm": 43.31019592285156, - "learning_rate": 1.1971555555555557e-05, - "loss": 1.6401, - "step": 28830 - }, - { - "epoch": 0.92288, - "grad_norm": 44.81446075439453, - "learning_rate": 1.1968000000000001e-05, - "loss": 1.6376, - "step": 28840 - }, - { - "epoch": 0.9232, - "grad_norm": 44.13412857055664, - "learning_rate": 1.1964444444444446e-05, - "loss": 1.6413, - "step": 28850 - }, - { - "epoch": 0.92352, - "grad_norm": 44.80860900878906, - "learning_rate": 1.196088888888889e-05, - "loss": 1.6442, - "step": 28860 - }, - { - "epoch": 0.92384, - "grad_norm": 43.5064582824707, - "learning_rate": 1.1957333333333335e-05, - "loss": 1.6279, - "step": 28870 - }, - { - "epoch": 0.92416, - "grad_norm": 42.37964630126953, - "learning_rate": 1.195377777777778e-05, - "loss": 1.6716, - "step": 28880 - }, - { - "epoch": 0.92448, - "grad_norm": 43.18762969970703, - "learning_rate": 1.1950222222222222e-05, - "loss": 1.6494, - "step": 28890 - }, - { - "epoch": 0.9248, - "grad_norm": 44.32823181152344, - "learning_rate": 1.1946666666666669e-05, - "loss": 1.6419, - "step": 28900 - }, - { - "epoch": 0.92512, - "grad_norm": 43.62746810913086, - "learning_rate": 1.1943111111111111e-05, - "loss": 1.6703, - "step": 28910 - }, - { - "epoch": 0.92544, - "grad_norm": 45.503726959228516, - "learning_rate": 1.1939555555555558e-05, - "loss": 1.6358, 
- "step": 28920 - }, - { - "epoch": 0.92576, - "grad_norm": 43.485008239746094, - "learning_rate": 1.1936e-05, - "loss": 1.6333, - "step": 28930 - }, - { - "epoch": 0.92608, - "grad_norm": 41.778106689453125, - "learning_rate": 1.1932444444444447e-05, - "loss": 1.6323, - "step": 28940 - }, - { - "epoch": 0.9264, - "grad_norm": 43.94820785522461, - "learning_rate": 1.192888888888889e-05, - "loss": 1.6447, - "step": 28950 - }, - { - "epoch": 0.92672, - "grad_norm": 43.780738830566406, - "learning_rate": 1.1925333333333333e-05, - "loss": 1.625, - "step": 28960 - }, - { - "epoch": 0.92704, - "grad_norm": 44.629703521728516, - "learning_rate": 1.1921777777777779e-05, - "loss": 1.6408, - "step": 28970 - }, - { - "epoch": 0.92736, - "grad_norm": 41.52702713012695, - "learning_rate": 1.1918222222222222e-05, - "loss": 1.6233, - "step": 28980 - }, - { - "epoch": 0.92768, - "grad_norm": 43.539798736572266, - "learning_rate": 1.1914666666666668e-05, - "loss": 1.625, - "step": 28990 - }, - { - "epoch": 0.928, - "grad_norm": 43.91656494140625, - "learning_rate": 1.191111111111111e-05, - "loss": 1.6765, - "step": 29000 - }, - { - "epoch": 0.92832, - "grad_norm": 45.65538024902344, - "learning_rate": 1.1907555555555557e-05, - "loss": 1.6276, - "step": 29010 - }, - { - "epoch": 0.92864, - "grad_norm": 42.96657180786133, - "learning_rate": 1.1904e-05, - "loss": 1.6342, - "step": 29020 - }, - { - "epoch": 0.92896, - "grad_norm": 43.52238464355469, - "learning_rate": 1.1900444444444446e-05, - "loss": 1.6234, - "step": 29030 - }, - { - "epoch": 0.92928, - "grad_norm": 44.23517608642578, - "learning_rate": 1.1896888888888889e-05, - "loss": 1.6552, - "step": 29040 - }, - { - "epoch": 0.9296, - "grad_norm": 43.24574661254883, - "learning_rate": 1.1893333333333335e-05, - "loss": 1.6266, - "step": 29050 - }, - { - "epoch": 0.92992, - "grad_norm": 46.02854919433594, - "learning_rate": 1.1889777777777778e-05, - "loss": 1.6376, - "step": 29060 - }, - { - "epoch": 0.93024, - "grad_norm": 44.43922424316406, - "learning_rate": 1.1886222222222223e-05, - "loss": 1.6347, - "step": 29070 - }, - { - "epoch": 0.93056, - "grad_norm": 42.341453552246094, - "learning_rate": 1.1882666666666667e-05, - "loss": 1.6311, - "step": 29080 - }, - { - "epoch": 0.93088, - "grad_norm": 43.742149353027344, - "learning_rate": 1.1879111111111112e-05, - "loss": 1.5966, - "step": 29090 - }, - { - "epoch": 0.9312, - "grad_norm": 45.52425765991211, - "learning_rate": 1.1875555555555556e-05, - "loss": 1.6459, - "step": 29100 - }, - { - "epoch": 0.93152, - "grad_norm": 42.77261734008789, - "learning_rate": 1.1872000000000001e-05, - "loss": 1.5989, - "step": 29110 - }, - { - "epoch": 0.93184, - "grad_norm": 43.86947250366211, - "learning_rate": 1.1868444444444445e-05, - "loss": 1.6352, - "step": 29120 - }, - { - "epoch": 0.93216, - "grad_norm": 43.85417175292969, - "learning_rate": 1.186488888888889e-05, - "loss": 1.6306, - "step": 29130 - }, - { - "epoch": 0.93248, - "grad_norm": 46.29254150390625, - "learning_rate": 1.1861333333333333e-05, - "loss": 1.6471, - "step": 29140 - }, - { - "epoch": 0.9328, - "grad_norm": 45.65533447265625, - "learning_rate": 1.1857777777777779e-05, - "loss": 1.6235, - "step": 29150 - }, - { - "epoch": 0.93312, - "grad_norm": 42.154380798339844, - "learning_rate": 1.1854222222222222e-05, - "loss": 1.6275, - "step": 29160 - }, - { - "epoch": 0.93344, - "grad_norm": 43.51826858520508, - "learning_rate": 1.1850666666666668e-05, - "loss": 1.6388, - "step": 29170 - }, - { - "epoch": 0.93376, - "grad_norm": 45.132057189941406, - 
"learning_rate": 1.1847111111111111e-05, - "loss": 1.6193, - "step": 29180 - }, - { - "epoch": 0.93408, - "grad_norm": 45.140865325927734, - "learning_rate": 1.1843555555555557e-05, - "loss": 1.6266, - "step": 29190 - }, - { - "epoch": 0.9344, - "grad_norm": 44.52220153808594, - "learning_rate": 1.184e-05, - "loss": 1.6444, - "step": 29200 - }, - { - "epoch": 0.93472, - "grad_norm": 43.61417770385742, - "learning_rate": 1.1836444444444446e-05, - "loss": 1.6247, - "step": 29210 - }, - { - "epoch": 0.93504, - "grad_norm": 43.42637252807617, - "learning_rate": 1.183288888888889e-05, - "loss": 1.6392, - "step": 29220 - }, - { - "epoch": 0.93536, - "grad_norm": 42.174129486083984, - "learning_rate": 1.1829333333333335e-05, - "loss": 1.6534, - "step": 29230 - }, - { - "epoch": 0.93568, - "grad_norm": 42.143123626708984, - "learning_rate": 1.1825777777777778e-05, - "loss": 1.6345, - "step": 29240 - }, - { - "epoch": 0.936, - "grad_norm": 46.554283142089844, - "learning_rate": 1.1822222222222225e-05, - "loss": 1.641, - "step": 29250 - }, - { - "epoch": 0.93632, - "grad_norm": 44.24030685424805, - "learning_rate": 1.1818666666666667e-05, - "loss": 1.6171, - "step": 29260 - }, - { - "epoch": 0.93664, - "grad_norm": 42.74769973754883, - "learning_rate": 1.1815111111111112e-05, - "loss": 1.6048, - "step": 29270 - }, - { - "epoch": 0.93696, - "grad_norm": 44.60746383666992, - "learning_rate": 1.1811555555555557e-05, - "loss": 1.6614, - "step": 29280 - }, - { - "epoch": 0.93728, - "grad_norm": 44.492679595947266, - "learning_rate": 1.1808000000000001e-05, - "loss": 1.6266, - "step": 29290 - }, - { - "epoch": 0.9376, - "grad_norm": 44.245113372802734, - "learning_rate": 1.1804444444444446e-05, - "loss": 1.637, - "step": 29300 - }, - { - "epoch": 0.93792, - "grad_norm": 43.29434585571289, - "learning_rate": 1.180088888888889e-05, - "loss": 1.6287, - "step": 29310 - }, - { - "epoch": 0.93824, - "grad_norm": 45.088443756103516, - "learning_rate": 1.1797333333333335e-05, - "loss": 1.6463, - "step": 29320 - }, - { - "epoch": 0.93856, - "grad_norm": 43.38103103637695, - "learning_rate": 1.179377777777778e-05, - "loss": 1.6177, - "step": 29330 - }, - { - "epoch": 0.93888, - "grad_norm": 45.160545349121094, - "learning_rate": 1.1790222222222222e-05, - "loss": 1.6182, - "step": 29340 - }, - { - "epoch": 0.9392, - "grad_norm": 43.422874450683594, - "learning_rate": 1.1786666666666668e-05, - "loss": 1.6388, - "step": 29350 - }, - { - "epoch": 0.93952, - "grad_norm": 45.420005798339844, - "learning_rate": 1.1783111111111111e-05, - "loss": 1.649, - "step": 29360 - }, - { - "epoch": 0.93984, - "grad_norm": 45.20408630371094, - "learning_rate": 1.1779555555555558e-05, - "loss": 1.6424, - "step": 29370 - }, - { - "epoch": 0.94016, - "grad_norm": 45.90959548950195, - "learning_rate": 1.1776e-05, - "loss": 1.629, - "step": 29380 - }, - { - "epoch": 0.94048, - "grad_norm": 43.6275520324707, - "learning_rate": 1.1772444444444447e-05, - "loss": 1.6303, - "step": 29390 - }, - { - "epoch": 0.9408, - "grad_norm": 44.0329704284668, - "learning_rate": 1.176888888888889e-05, - "loss": 1.6673, - "step": 29400 - }, - { - "epoch": 0.94112, - "grad_norm": 45.125205993652344, - "learning_rate": 1.1765333333333336e-05, - "loss": 1.6487, - "step": 29410 - }, - { - "epoch": 0.94144, - "grad_norm": 44.226348876953125, - "learning_rate": 1.1761777777777779e-05, - "loss": 1.6489, - "step": 29420 - }, - { - "epoch": 0.94176, - "grad_norm": 45.3155403137207, - "learning_rate": 1.1758222222222225e-05, - "loss": 1.6348, - "step": 29430 - }, - { 
- "epoch": 0.94208, - "grad_norm": 43.43427276611328, - "learning_rate": 1.1754666666666668e-05, - "loss": 1.6218, - "step": 29440 - }, - { - "epoch": 0.9424, - "grad_norm": 44.82199478149414, - "learning_rate": 1.1751111111111112e-05, - "loss": 1.6213, - "step": 29450 - }, - { - "epoch": 0.94272, - "grad_norm": 43.701515197753906, - "learning_rate": 1.1747555555555557e-05, - "loss": 1.675, - "step": 29460 - }, - { - "epoch": 0.94304, - "grad_norm": 44.70633316040039, - "learning_rate": 1.1744000000000001e-05, - "loss": 1.6333, - "step": 29470 - }, - { - "epoch": 0.94336, - "grad_norm": 45.40119552612305, - "learning_rate": 1.1740444444444446e-05, - "loss": 1.6605, - "step": 29480 - }, - { - "epoch": 0.94368, - "grad_norm": 42.81822967529297, - "learning_rate": 1.1736888888888889e-05, - "loss": 1.6182, - "step": 29490 - }, - { - "epoch": 0.944, - "grad_norm": 43.596412658691406, - "learning_rate": 1.1733333333333335e-05, - "loss": 1.6614, - "step": 29500 - }, - { - "epoch": 0.94432, - "grad_norm": 43.47703170776367, - "learning_rate": 1.1729777777777778e-05, - "loss": 1.6031, - "step": 29510 - }, - { - "epoch": 0.94464, - "grad_norm": 42.71162414550781, - "learning_rate": 1.1726222222222222e-05, - "loss": 1.6587, - "step": 29520 - }, - { - "epoch": 0.94496, - "grad_norm": 43.19927215576172, - "learning_rate": 1.1722666666666667e-05, - "loss": 1.6457, - "step": 29530 - }, - { - "epoch": 0.94528, - "grad_norm": 44.56126022338867, - "learning_rate": 1.1719111111111112e-05, - "loss": 1.6242, - "step": 29540 - }, - { - "epoch": 0.9456, - "grad_norm": 44.282997131347656, - "learning_rate": 1.1715555555555556e-05, - "loss": 1.6344, - "step": 29550 - }, - { - "epoch": 0.94592, - "grad_norm": 44.068756103515625, - "learning_rate": 1.1712e-05, - "loss": 1.6445, - "step": 29560 - }, - { - "epoch": 0.94624, - "grad_norm": 43.77211380004883, - "learning_rate": 1.1708444444444445e-05, - "loss": 1.6255, - "step": 29570 - }, - { - "epoch": 0.94656, - "grad_norm": 44.08302307128906, - "learning_rate": 1.170488888888889e-05, - "loss": 1.673, - "step": 29580 - }, - { - "epoch": 0.94688, - "grad_norm": 43.33510208129883, - "learning_rate": 1.1701333333333333e-05, - "loss": 1.5995, - "step": 29590 - }, - { - "epoch": 0.9472, - "grad_norm": 42.72581100463867, - "learning_rate": 1.1697777777777779e-05, - "loss": 1.6474, - "step": 29600 - }, - { - "epoch": 0.94752, - "grad_norm": 43.55382537841797, - "learning_rate": 1.1694222222222222e-05, - "loss": 1.64, - "step": 29610 - }, - { - "epoch": 0.94784, - "grad_norm": 42.47932815551758, - "learning_rate": 1.1690666666666668e-05, - "loss": 1.5957, - "step": 29620 - }, - { - "epoch": 0.94816, - "grad_norm": 44.38590621948242, - "learning_rate": 1.1687111111111111e-05, - "loss": 1.6377, - "step": 29630 - }, - { - "epoch": 0.94848, - "grad_norm": 42.693572998046875, - "learning_rate": 1.1683555555555557e-05, - "loss": 1.6525, - "step": 29640 - }, - { - "epoch": 0.9488, - "grad_norm": 46.156307220458984, - "learning_rate": 1.168e-05, - "loss": 1.6412, - "step": 29650 - }, - { - "epoch": 0.94912, - "grad_norm": 44.097286224365234, - "learning_rate": 1.1676444444444446e-05, - "loss": 1.6264, - "step": 29660 - }, - { - "epoch": 0.94944, - "grad_norm": 44.33402633666992, - "learning_rate": 1.1672888888888889e-05, - "loss": 1.6095, - "step": 29670 - }, - { - "epoch": 0.94976, - "grad_norm": 44.41627502441406, - "learning_rate": 1.1669333333333335e-05, - "loss": 1.6228, - "step": 29680 - }, - { - "epoch": 0.95008, - "grad_norm": 42.222347259521484, - "learning_rate": 
1.1665777777777778e-05, - "loss": 1.6349, - "step": 29690 - }, - { - "epoch": 0.9504, - "grad_norm": 42.85820007324219, - "learning_rate": 1.1662222222222224e-05, - "loss": 1.6481, - "step": 29700 - }, - { - "epoch": 0.95072, - "grad_norm": 44.937313079833984, - "learning_rate": 1.1658666666666667e-05, - "loss": 1.6529, - "step": 29710 - }, - { - "epoch": 0.95104, - "grad_norm": 44.15386962890625, - "learning_rate": 1.1655111111111112e-05, - "loss": 1.6421, - "step": 29720 - }, - { - "epoch": 0.95136, - "grad_norm": 45.95855712890625, - "learning_rate": 1.1651555555555556e-05, - "loss": 1.6265, - "step": 29730 - }, - { - "epoch": 0.95168, - "grad_norm": 43.64844512939453, - "learning_rate": 1.1648000000000001e-05, - "loss": 1.6553, - "step": 29740 - }, - { - "epoch": 0.952, - "grad_norm": 45.064292907714844, - "learning_rate": 1.1644444444444446e-05, - "loss": 1.6425, - "step": 29750 - }, - { - "epoch": 0.95232, - "grad_norm": 42.177894592285156, - "learning_rate": 1.164088888888889e-05, - "loss": 1.6312, - "step": 29760 - }, - { - "epoch": 0.95264, - "grad_norm": 43.947303771972656, - "learning_rate": 1.1637333333333335e-05, - "loss": 1.6438, - "step": 29770 - }, - { - "epoch": 0.95296, - "grad_norm": 43.64456558227539, - "learning_rate": 1.163377777777778e-05, - "loss": 1.6369, - "step": 29780 - }, - { - "epoch": 0.95328, - "grad_norm": 42.0169792175293, - "learning_rate": 1.1630222222222222e-05, - "loss": 1.6157, - "step": 29790 - }, - { - "epoch": 0.9536, - "grad_norm": 45.208892822265625, - "learning_rate": 1.1626666666666668e-05, - "loss": 1.6239, - "step": 29800 - }, - { - "epoch": 0.95392, - "grad_norm": 45.014469146728516, - "learning_rate": 1.1623111111111111e-05, - "loss": 1.6342, - "step": 29810 - }, - { - "epoch": 0.95424, - "grad_norm": 43.519657135009766, - "learning_rate": 1.1619555555555557e-05, - "loss": 1.662, - "step": 29820 - }, - { - "epoch": 0.95456, - "grad_norm": 42.91891098022461, - "learning_rate": 1.1616e-05, - "loss": 1.6255, - "step": 29830 - }, - { - "epoch": 0.95488, - "grad_norm": 43.61880111694336, - "learning_rate": 1.1612444444444447e-05, - "loss": 1.6335, - "step": 29840 - }, - { - "epoch": 0.9552, - "grad_norm": 43.08018493652344, - "learning_rate": 1.160888888888889e-05, - "loss": 1.6338, - "step": 29850 - }, - { - "epoch": 0.95552, - "grad_norm": 46.001365661621094, - "learning_rate": 1.1605333333333336e-05, - "loss": 1.6812, - "step": 29860 - }, - { - "epoch": 0.95584, - "grad_norm": 45.59417724609375, - "learning_rate": 1.1601777777777778e-05, - "loss": 1.6063, - "step": 29870 - }, - { - "epoch": 0.95616, - "grad_norm": 43.582889556884766, - "learning_rate": 1.1598222222222225e-05, - "loss": 1.6427, - "step": 29880 - }, - { - "epoch": 0.95648, - "grad_norm": 43.61624526977539, - "learning_rate": 1.1594666666666668e-05, - "loss": 1.6171, - "step": 29890 - }, - { - "epoch": 0.9568, - "grad_norm": 46.41237258911133, - "learning_rate": 1.1591111111111114e-05, - "loss": 1.6525, - "step": 29900 - }, - { - "epoch": 0.95712, - "grad_norm": 43.611732482910156, - "learning_rate": 1.1587555555555557e-05, - "loss": 1.6371, - "step": 29910 - }, - { - "epoch": 0.95744, - "grad_norm": 43.99297332763672, - "learning_rate": 1.1584000000000001e-05, - "loss": 1.6593, - "step": 29920 - }, - { - "epoch": 0.95776, - "grad_norm": 44.05895233154297, - "learning_rate": 1.1580444444444446e-05, - "loss": 1.6381, - "step": 29930 - }, - { - "epoch": 0.95808, - "grad_norm": 43.11012649536133, - "learning_rate": 1.157688888888889e-05, - "loss": 1.6465, - "step": 29940 - }, - { - 
"epoch": 0.9584, - "grad_norm": 46.63924026489258, - "learning_rate": 1.1573333333333335e-05, - "loss": 1.6507, - "step": 29950 - }, - { - "epoch": 0.95872, - "grad_norm": 44.59290313720703, - "learning_rate": 1.156977777777778e-05, - "loss": 1.6696, - "step": 29960 - }, - { - "epoch": 0.95904, - "grad_norm": 45.51963424682617, - "learning_rate": 1.1566222222222222e-05, - "loss": 1.6326, - "step": 29970 - }, - { - "epoch": 0.95936, - "grad_norm": 44.144065856933594, - "learning_rate": 1.1562666666666669e-05, - "loss": 1.6533, - "step": 29980 - }, - { - "epoch": 0.95968, - "grad_norm": 44.824337005615234, - "learning_rate": 1.1559111111111111e-05, - "loss": 1.6399, - "step": 29990 - }, - { - "epoch": 0.96, - "grad_norm": 44.10457229614258, - "learning_rate": 1.1555555555555556e-05, - "loss": 1.6111, - "step": 30000 - }, - { - "epoch": 0.96032, - "grad_norm": 44.56742858886719, - "learning_rate": 1.1552e-05, - "loss": 1.6558, - "step": 30010 - }, - { - "epoch": 0.96064, - "grad_norm": 43.97772216796875, - "learning_rate": 1.1548444444444445e-05, - "loss": 1.659, - "step": 30020 - }, - { - "epoch": 0.96096, - "grad_norm": 44.7454948425293, - "learning_rate": 1.154488888888889e-05, - "loss": 1.6539, - "step": 30030 - }, - { - "epoch": 0.96128, - "grad_norm": 45.64804458618164, - "learning_rate": 1.1541333333333332e-05, - "loss": 1.6259, - "step": 30040 - }, - { - "epoch": 0.9616, - "grad_norm": 48.72989273071289, - "learning_rate": 1.1537777777777779e-05, - "loss": 1.6336, - "step": 30050 - }, - { - "epoch": 0.96192, - "grad_norm": 46.82978439331055, - "learning_rate": 1.1534222222222222e-05, - "loss": 1.6409, - "step": 30060 - }, - { - "epoch": 0.96224, - "grad_norm": 43.353065490722656, - "learning_rate": 1.1530666666666668e-05, - "loss": 1.6433, - "step": 30070 - }, - { - "epoch": 0.96256, - "grad_norm": 42.573368072509766, - "learning_rate": 1.152711111111111e-05, - "loss": 1.6357, - "step": 30080 - }, - { - "epoch": 0.96288, - "grad_norm": 42.796142578125, - "learning_rate": 1.1523555555555557e-05, - "loss": 1.6457, - "step": 30090 - }, - { - "epoch": 0.9632, - "grad_norm": 45.9584846496582, - "learning_rate": 1.152e-05, - "loss": 1.6248, - "step": 30100 - }, - { - "epoch": 0.96352, - "grad_norm": 44.436767578125, - "learning_rate": 1.1516444444444446e-05, - "loss": 1.6573, - "step": 30110 - }, - { - "epoch": 0.96384, - "grad_norm": 44.212158203125, - "learning_rate": 1.1512888888888889e-05, - "loss": 1.6392, - "step": 30120 - }, - { - "epoch": 0.96416, - "grad_norm": 44.3819465637207, - "learning_rate": 1.1509333333333335e-05, - "loss": 1.6345, - "step": 30130 - }, - { - "epoch": 0.96448, - "grad_norm": 44.8933219909668, - "learning_rate": 1.1505777777777778e-05, - "loss": 1.6561, - "step": 30140 - }, - { - "epoch": 0.9648, - "grad_norm": 44.94137191772461, - "learning_rate": 1.1502222222222224e-05, - "loss": 1.6266, - "step": 30150 - }, - { - "epoch": 0.96512, - "grad_norm": 42.0122184753418, - "learning_rate": 1.1498666666666667e-05, - "loss": 1.6291, - "step": 30160 - }, - { - "epoch": 0.96544, - "grad_norm": 43.78432083129883, - "learning_rate": 1.1495111111111112e-05, - "loss": 1.676, - "step": 30170 - }, - { - "epoch": 0.96576, - "grad_norm": 43.573978424072266, - "learning_rate": 1.1491555555555556e-05, - "loss": 1.6278, - "step": 30180 - }, - { - "epoch": 0.96608, - "grad_norm": 44.11012268066406, - "learning_rate": 1.1488e-05, - "loss": 1.6484, - "step": 30190 - }, - { - "epoch": 0.9664, - "grad_norm": 43.47837448120117, - "learning_rate": 1.1484444444444445e-05, - "loss": 
1.6247, - "step": 30200 - }, - { - "epoch": 0.96672, - "grad_norm": 44.80249786376953, - "learning_rate": 1.148088888888889e-05, - "loss": 1.6512, - "step": 30210 - }, - { - "epoch": 0.96704, - "grad_norm": 43.91743087768555, - "learning_rate": 1.1477333333333334e-05, - "loss": 1.644, - "step": 30220 - }, - { - "epoch": 0.96736, - "grad_norm": 42.66202926635742, - "learning_rate": 1.1473777777777779e-05, - "loss": 1.6557, - "step": 30230 - }, - { - "epoch": 0.96768, - "grad_norm": 43.54933547973633, - "learning_rate": 1.1470222222222222e-05, - "loss": 1.6268, - "step": 30240 - }, - { - "epoch": 0.968, - "grad_norm": 43.00261306762695, - "learning_rate": 1.1466666666666668e-05, - "loss": 1.6504, - "step": 30250 - }, - { - "epoch": 0.96832, - "grad_norm": 44.712650299072266, - "learning_rate": 1.1463111111111111e-05, - "loss": 1.6051, - "step": 30260 - }, - { - "epoch": 0.96864, - "grad_norm": 44.88301467895508, - "learning_rate": 1.1459555555555557e-05, - "loss": 1.6384, - "step": 30270 - }, - { - "epoch": 0.96896, - "grad_norm": 43.14665985107422, - "learning_rate": 1.1456e-05, - "loss": 1.6284, - "step": 30280 - }, - { - "epoch": 0.96928, - "grad_norm": 46.33767318725586, - "learning_rate": 1.1452444444444446e-05, - "loss": 1.649, - "step": 30290 - }, - { - "epoch": 0.9696, - "grad_norm": 43.687705993652344, - "learning_rate": 1.144888888888889e-05, - "loss": 1.6431, - "step": 30300 - }, - { - "epoch": 0.96992, - "grad_norm": 44.642269134521484, - "learning_rate": 1.1445333333333335e-05, - "loss": 1.6555, - "step": 30310 - }, - { - "epoch": 0.97024, - "grad_norm": 44.56669235229492, - "learning_rate": 1.1441777777777778e-05, - "loss": 1.643, - "step": 30320 - }, - { - "epoch": 0.97056, - "grad_norm": 45.14842224121094, - "learning_rate": 1.1438222222222225e-05, - "loss": 1.6202, - "step": 30330 - }, - { - "epoch": 0.97088, - "grad_norm": 47.29034423828125, - "learning_rate": 1.1434666666666667e-05, - "loss": 1.6382, - "step": 30340 - }, - { - "epoch": 0.9712, - "grad_norm": 44.174095153808594, - "learning_rate": 1.1431111111111114e-05, - "loss": 1.6335, - "step": 30350 - }, - { - "epoch": 0.97152, - "grad_norm": 44.7418098449707, - "learning_rate": 1.1427555555555557e-05, - "loss": 1.6421, - "step": 30360 - }, - { - "epoch": 0.97184, - "grad_norm": 42.68954849243164, - "learning_rate": 1.1424000000000001e-05, - "loss": 1.6339, - "step": 30370 - }, - { - "epoch": 0.97216, - "grad_norm": 43.91781997680664, - "learning_rate": 1.1420444444444446e-05, - "loss": 1.6163, - "step": 30380 - }, - { - "epoch": 0.97248, - "grad_norm": 42.75111770629883, - "learning_rate": 1.141688888888889e-05, - "loss": 1.63, - "step": 30390 - }, - { - "epoch": 0.9728, - "grad_norm": 44.555721282958984, - "learning_rate": 1.1413333333333335e-05, - "loss": 1.6256, - "step": 30400 - }, - { - "epoch": 0.97312, - "grad_norm": 46.852989196777344, - "learning_rate": 1.140977777777778e-05, - "loss": 1.6379, - "step": 30410 - }, - { - "epoch": 0.97344, - "grad_norm": 46.11513137817383, - "learning_rate": 1.1406222222222222e-05, - "loss": 1.6444, - "step": 30420 - }, - { - "epoch": 0.97376, - "grad_norm": 49.00882339477539, - "learning_rate": 1.1402666666666668e-05, - "loss": 1.6371, - "step": 30430 - }, - { - "epoch": 0.97408, - "grad_norm": 44.7541389465332, - "learning_rate": 1.1399111111111111e-05, - "loss": 1.6025, - "step": 30440 - }, - { - "epoch": 0.9744, - "grad_norm": 42.61855697631836, - "learning_rate": 1.1395555555555558e-05, - "loss": 1.6189, - "step": 30450 - }, - { - "epoch": 0.97472, - "grad_norm": 
44.200260162353516, - "learning_rate": 1.1392e-05, - "loss": 1.6056, - "step": 30460 - }, - { - "epoch": 0.97504, - "grad_norm": 44.01932144165039, - "learning_rate": 1.1388444444444447e-05, - "loss": 1.6664, - "step": 30470 - }, - { - "epoch": 0.97536, - "grad_norm": 44.93379211425781, - "learning_rate": 1.138488888888889e-05, - "loss": 1.6603, - "step": 30480 - }, - { - "epoch": 0.97568, - "grad_norm": 42.41716766357422, - "learning_rate": 1.1381333333333336e-05, - "loss": 1.6235, - "step": 30490 - }, - { - "epoch": 0.976, - "grad_norm": 46.20936584472656, - "learning_rate": 1.1377777777777779e-05, - "loss": 1.6323, - "step": 30500 - }, - { - "epoch": 0.97632, - "grad_norm": 41.390140533447266, - "learning_rate": 1.1374222222222221e-05, - "loss": 1.6059, - "step": 30510 - }, - { - "epoch": 0.97664, - "grad_norm": 44.24008560180664, - "learning_rate": 1.1370666666666668e-05, - "loss": 1.6441, - "step": 30520 - }, - { - "epoch": 0.97696, - "grad_norm": 44.89662170410156, - "learning_rate": 1.136711111111111e-05, - "loss": 1.6322, - "step": 30530 - }, - { - "epoch": 0.97728, - "grad_norm": 45.3996696472168, - "learning_rate": 1.1363555555555557e-05, - "loss": 1.6557, - "step": 30540 - }, - { - "epoch": 0.9776, - "grad_norm": 43.94699478149414, - "learning_rate": 1.136e-05, - "loss": 1.6289, - "step": 30550 - }, - { - "epoch": 0.97792, - "grad_norm": 44.350345611572266, - "learning_rate": 1.1356444444444446e-05, - "loss": 1.6466, - "step": 30560 - }, - { - "epoch": 0.97824, - "grad_norm": 45.596744537353516, - "learning_rate": 1.1352888888888889e-05, - "loss": 1.6226, - "step": 30570 - }, - { - "epoch": 0.97856, - "grad_norm": 43.384891510009766, - "learning_rate": 1.1349333333333335e-05, - "loss": 1.6647, - "step": 30580 - }, - { - "epoch": 0.97888, - "grad_norm": 44.46726608276367, - "learning_rate": 1.1345777777777778e-05, - "loss": 1.6085, - "step": 30590 - }, - { - "epoch": 0.9792, - "grad_norm": 42.867740631103516, - "learning_rate": 1.1342222222222224e-05, - "loss": 1.6466, - "step": 30600 - }, - { - "epoch": 0.97952, - "grad_norm": 43.6484260559082, - "learning_rate": 1.1338666666666667e-05, - "loss": 1.6434, - "step": 30610 - }, - { - "epoch": 0.97984, - "grad_norm": 42.653663635253906, - "learning_rate": 1.1335111111111112e-05, - "loss": 1.6357, - "step": 30620 - }, - { - "epoch": 0.98016, - "grad_norm": 44.374412536621094, - "learning_rate": 1.1331555555555556e-05, - "loss": 1.6397, - "step": 30630 - }, - { - "epoch": 0.98048, - "grad_norm": 44.84930419921875, - "learning_rate": 1.1328e-05, - "loss": 1.6097, - "step": 30640 - }, - { - "epoch": 0.9808, - "grad_norm": 43.1632194519043, - "learning_rate": 1.1324444444444445e-05, - "loss": 1.624, - "step": 30650 - }, - { - "epoch": 0.98112, - "grad_norm": 44.800209045410156, - "learning_rate": 1.132088888888889e-05, - "loss": 1.6803, - "step": 30660 - }, - { - "epoch": 0.98144, - "grad_norm": 44.4241943359375, - "learning_rate": 1.1317333333333334e-05, - "loss": 1.6252, - "step": 30670 - }, - { - "epoch": 0.98176, - "grad_norm": 43.48160934448242, - "learning_rate": 1.1313777777777779e-05, - "loss": 1.622, - "step": 30680 - }, - { - "epoch": 0.98208, - "grad_norm": 42.53988265991211, - "learning_rate": 1.1310222222222222e-05, - "loss": 1.6255, - "step": 30690 - }, - { - "epoch": 0.9824, - "grad_norm": 43.95156478881836, - "learning_rate": 1.1306666666666668e-05, - "loss": 1.6437, - "step": 30700 - }, - { - "epoch": 0.98272, - "grad_norm": 42.917320251464844, - "learning_rate": 1.130311111111111e-05, - "loss": 1.5937, - "step": 30710 - 
}, - { - "epoch": 0.98304, - "grad_norm": 44.980560302734375, - "learning_rate": 1.1299555555555557e-05, - "loss": 1.6841, - "step": 30720 - }, - { - "epoch": 0.98336, - "grad_norm": 45.12786102294922, - "learning_rate": 1.1296e-05, - "loss": 1.6192, - "step": 30730 - }, - { - "epoch": 0.98368, - "grad_norm": 43.9362678527832, - "learning_rate": 1.1292444444444446e-05, - "loss": 1.6137, - "step": 30740 - }, - { - "epoch": 0.984, - "grad_norm": 43.08484649658203, - "learning_rate": 1.1288888888888889e-05, - "loss": 1.6136, - "step": 30750 - }, - { - "epoch": 0.98432, - "grad_norm": 44.23360061645508, - "learning_rate": 1.1285333333333335e-05, - "loss": 1.6281, - "step": 30760 - }, - { - "epoch": 0.98464, - "grad_norm": 43.02987289428711, - "learning_rate": 1.1281777777777778e-05, - "loss": 1.6106, - "step": 30770 - }, - { - "epoch": 0.98496, - "grad_norm": 42.354278564453125, - "learning_rate": 1.1278222222222224e-05, - "loss": 1.6278, - "step": 30780 - }, - { - "epoch": 0.98528, - "grad_norm": 43.63727951049805, - "learning_rate": 1.1274666666666667e-05, - "loss": 1.624, - "step": 30790 - }, - { - "epoch": 0.9856, - "grad_norm": 44.27710723876953, - "learning_rate": 1.1271111111111113e-05, - "loss": 1.6167, - "step": 30800 - }, - { - "epoch": 0.98592, - "grad_norm": 46.397186279296875, - "learning_rate": 1.1267555555555556e-05, - "loss": 1.6536, - "step": 30810 - }, - { - "epoch": 0.98624, - "grad_norm": 44.01591110229492, - "learning_rate": 1.1264000000000001e-05, - "loss": 1.5924, - "step": 30820 - }, - { - "epoch": 0.98656, - "grad_norm": 44.404296875, - "learning_rate": 1.1260444444444445e-05, - "loss": 1.6529, - "step": 30830 - }, - { - "epoch": 0.98688, - "grad_norm": 45.859962463378906, - "learning_rate": 1.125688888888889e-05, - "loss": 1.6152, - "step": 30840 - }, - { - "epoch": 0.9872, - "grad_norm": 45.23036193847656, - "learning_rate": 1.1253333333333335e-05, - "loss": 1.6502, - "step": 30850 - }, - { - "epoch": 0.98752, - "grad_norm": 44.69296646118164, - "learning_rate": 1.1249777777777779e-05, - "loss": 1.6376, - "step": 30860 - }, - { - "epoch": 0.98784, - "grad_norm": 45.085567474365234, - "learning_rate": 1.1246222222222222e-05, - "loss": 1.6334, - "step": 30870 - }, - { - "epoch": 0.98816, - "grad_norm": 44.13175582885742, - "learning_rate": 1.1242666666666668e-05, - "loss": 1.6392, - "step": 30880 - }, - { - "epoch": 0.98848, - "grad_norm": 45.575565338134766, - "learning_rate": 1.1239111111111111e-05, - "loss": 1.6456, - "step": 30890 - }, - { - "epoch": 0.9888, - "grad_norm": 461.7893371582031, - "learning_rate": 1.1235555555555557e-05, - "loss": 1.6544, - "step": 30900 - }, - { - "epoch": 0.98912, - "grad_norm": 44.190555572509766, - "learning_rate": 1.1232e-05, - "loss": 1.6594, - "step": 30910 - }, - { - "epoch": 0.98944, - "grad_norm": 46.13947677612305, - "learning_rate": 1.1228444444444446e-05, - "loss": 1.6127, - "step": 30920 - }, - { - "epoch": 0.98976, - "grad_norm": 44.94755554199219, - "learning_rate": 1.122488888888889e-05, - "loss": 1.6411, - "step": 30930 - }, - { - "epoch": 0.99008, - "grad_norm": 44.4129524230957, - "learning_rate": 1.1221333333333336e-05, - "loss": 1.6209, - "step": 30940 - }, - { - "epoch": 0.9904, - "grad_norm": 44.70603942871094, - "learning_rate": 1.1217777777777778e-05, - "loss": 1.6358, - "step": 30950 - }, - { - "epoch": 0.99072, - "grad_norm": 45.2507209777832, - "learning_rate": 1.1214222222222225e-05, - "loss": 1.651, - "step": 30960 - }, - { - "epoch": 0.99104, - "grad_norm": 45.008705139160156, - "learning_rate": 
1.1210666666666668e-05, - "loss": 1.6225, - "step": 30970 - }, - { - "epoch": 0.99136, - "grad_norm": 45.01123046875, - "learning_rate": 1.1207111111111114e-05, - "loss": 1.6806, - "step": 30980 - }, - { - "epoch": 0.99168, - "grad_norm": 45.39712142944336, - "learning_rate": 1.1203555555555557e-05, - "loss": 1.6466, - "step": 30990 - }, - { - "epoch": 0.992, - "grad_norm": 43.38059616088867, - "learning_rate": 1.1200000000000001e-05, - "loss": 1.6306, - "step": 31000 - }, - { - "epoch": 0.99232, - "grad_norm": 44.428375244140625, - "learning_rate": 1.1196444444444446e-05, - "loss": 1.6364, - "step": 31010 - }, - { - "epoch": 0.99264, - "grad_norm": 44.255958557128906, - "learning_rate": 1.119288888888889e-05, - "loss": 1.6408, - "step": 31020 - }, - { - "epoch": 0.99296, - "grad_norm": 43.42549514770508, - "learning_rate": 1.1189333333333335e-05, - "loss": 1.6308, - "step": 31030 - }, - { - "epoch": 0.99328, - "grad_norm": 43.82905197143555, - "learning_rate": 1.1185777777777778e-05, - "loss": 1.6463, - "step": 31040 - }, - { - "epoch": 0.9936, - "grad_norm": 43.67567443847656, - "learning_rate": 1.1182222222222224e-05, - "loss": 1.651, - "step": 31050 - }, - { - "epoch": 0.99392, - "grad_norm": 43.85862350463867, - "learning_rate": 1.1178666666666667e-05, - "loss": 1.6369, - "step": 31060 - }, - { - "epoch": 0.99424, - "grad_norm": 45.972511291503906, - "learning_rate": 1.1175111111111111e-05, - "loss": 1.6217, - "step": 31070 - }, - { - "epoch": 0.99456, - "grad_norm": 44.29342269897461, - "learning_rate": 1.1171555555555556e-05, - "loss": 1.6585, - "step": 31080 - }, - { - "epoch": 0.99488, - "grad_norm": 45.08591079711914, - "learning_rate": 1.1168e-05, - "loss": 1.6769, - "step": 31090 - }, - { - "epoch": 0.9952, - "grad_norm": 45.8797721862793, - "learning_rate": 1.1164444444444445e-05, - "loss": 1.654, - "step": 31100 - }, - { - "epoch": 0.99552, - "grad_norm": 46.71592712402344, - "learning_rate": 1.116088888888889e-05, - "loss": 1.5952, - "step": 31110 - }, - { - "epoch": 0.99584, - "grad_norm": 42.47768020629883, - "learning_rate": 1.1157333333333334e-05, - "loss": 1.608, - "step": 31120 - }, - { - "epoch": 0.99616, - "grad_norm": 46.937469482421875, - "learning_rate": 1.1153777777777779e-05, - "loss": 1.6215, - "step": 31130 - }, - { - "epoch": 0.99648, - "grad_norm": 46.01224899291992, - "learning_rate": 1.1150222222222222e-05, - "loss": 1.6357, - "step": 31140 - }, - { - "epoch": 0.9968, - "grad_norm": 44.50090026855469, - "learning_rate": 1.1146666666666668e-05, - "loss": 1.6372, - "step": 31150 - }, - { - "epoch": 0.99712, - "grad_norm": 44.25981140136719, - "learning_rate": 1.114311111111111e-05, - "loss": 1.6213, - "step": 31160 - }, - { - "epoch": 0.99744, - "grad_norm": 47.06044006347656, - "learning_rate": 1.1139555555555557e-05, - "loss": 1.6116, - "step": 31170 - }, - { - "epoch": 0.99776, - "grad_norm": 45.36325454711914, - "learning_rate": 1.1136e-05, - "loss": 1.6098, - "step": 31180 - }, - { - "epoch": 0.99808, - "grad_norm": 43.87416076660156, - "learning_rate": 1.1132444444444446e-05, - "loss": 1.6587, - "step": 31190 - }, - { - "epoch": 0.9984, - "grad_norm": 45.90060806274414, - "learning_rate": 1.1128888888888889e-05, - "loss": 1.6715, - "step": 31200 - }, - { - "epoch": 0.99872, - "grad_norm": 44.73828125, - "learning_rate": 1.1125333333333335e-05, - "loss": 1.6297, - "step": 31210 - }, - { - "epoch": 0.99904, - "grad_norm": 45.52794647216797, - "learning_rate": 1.1121777777777778e-05, - "loss": 1.655, - "step": 31220 - }, - { - "epoch": 0.99936, - 
"grad_norm": 44.59988784790039, - "learning_rate": 1.1118222222222224e-05, - "loss": 1.6252, - "step": 31230 - }, - { - "epoch": 0.99968, - "grad_norm": 45.505130767822266, - "learning_rate": 1.1114666666666667e-05, - "loss": 1.6094, - "step": 31240 - }, - { - "epoch": 1.0, - "grad_norm": 43.133392333984375, - "learning_rate": 1.1111111111111113e-05, - "loss": 1.6227, - "step": 31250 - }, - { - "epoch": 1.00032, - "grad_norm": 46.064205169677734, - "learning_rate": 1.1107555555555556e-05, - "loss": 1.6462, - "step": 31260 - }, - { - "epoch": 1.00064, - "grad_norm": 43.47576904296875, - "learning_rate": 1.1104e-05, - "loss": 1.6394, - "step": 31270 - }, - { - "epoch": 1.00096, - "grad_norm": 44.37240219116211, - "learning_rate": 1.1100444444444445e-05, - "loss": 1.68, - "step": 31280 - }, - { - "epoch": 1.00128, - "grad_norm": 46.296592712402344, - "learning_rate": 1.109688888888889e-05, - "loss": 1.6122, - "step": 31290 - }, - { - "epoch": 1.0016, - "grad_norm": 45.33930206298828, - "learning_rate": 1.1093333333333334e-05, - "loss": 1.6252, - "step": 31300 - }, - { - "epoch": 1.00192, - "grad_norm": 45.15934753417969, - "learning_rate": 1.1089777777777779e-05, - "loss": 1.679, - "step": 31310 - }, - { - "epoch": 1.00224, - "grad_norm": 43.59319305419922, - "learning_rate": 1.1086222222222222e-05, - "loss": 1.6413, - "step": 31320 - }, - { - "epoch": 1.00256, - "grad_norm": 44.4049072265625, - "learning_rate": 1.1082666666666668e-05, - "loss": 1.6278, - "step": 31330 - }, - { - "epoch": 1.00288, - "grad_norm": 62.05976486206055, - "learning_rate": 1.1079111111111111e-05, - "loss": 1.6498, - "step": 31340 - }, - { - "epoch": 1.0032, - "grad_norm": 45.33911895751953, - "learning_rate": 1.1075555555555557e-05, - "loss": 1.6542, - "step": 31350 - }, - { - "epoch": 1.00352, - "grad_norm": 42.654972076416016, - "learning_rate": 1.1072e-05, - "loss": 1.6216, - "step": 31360 - }, - { - "epoch": 1.00384, - "grad_norm": 45.3847770690918, - "learning_rate": 1.1068444444444446e-05, - "loss": 1.6225, - "step": 31370 - }, - { - "epoch": 1.00416, - "grad_norm": 43.77812576293945, - "learning_rate": 1.1064888888888889e-05, - "loss": 1.6216, - "step": 31380 - }, - { - "epoch": 1.00448, - "grad_norm": 43.009376525878906, - "learning_rate": 1.1061333333333335e-05, - "loss": 1.6285, - "step": 31390 - }, - { - "epoch": 1.0048, - "grad_norm": 43.0247802734375, - "learning_rate": 1.1057777777777778e-05, - "loss": 1.6343, - "step": 31400 - }, - { - "epoch": 1.00512, - "grad_norm": 45.1480827331543, - "learning_rate": 1.1054222222222225e-05, - "loss": 1.6199, - "step": 31410 - }, - { - "epoch": 1.0054400000000001, - "grad_norm": 42.25900650024414, - "learning_rate": 1.1050666666666667e-05, - "loss": 1.6211, - "step": 31420 - }, - { - "epoch": 1.00576, - "grad_norm": 44.692604064941406, - "learning_rate": 1.1047111111111114e-05, - "loss": 1.6498, - "step": 31430 - }, - { - "epoch": 1.00608, - "grad_norm": 44.6058235168457, - "learning_rate": 1.1043555555555556e-05, - "loss": 1.6691, - "step": 31440 - }, - { - "epoch": 1.0064, - "grad_norm": 43.9179573059082, - "learning_rate": 1.1040000000000001e-05, - "loss": 1.6219, - "step": 31450 - }, - { - "epoch": 1.00672, - "grad_norm": 42.16164779663086, - "learning_rate": 1.1036444444444446e-05, - "loss": 1.6215, - "step": 31460 - }, - { - "epoch": 1.00704, - "grad_norm": 44.84690856933594, - "learning_rate": 1.103288888888889e-05, - "loss": 1.6117, - "step": 31470 - }, - { - "epoch": 1.00736, - "grad_norm": 44.58945083618164, - "learning_rate": 1.1029333333333335e-05, - 
"loss": 1.6571, - "step": 31480 - }, - { - "epoch": 1.00768, - "grad_norm": 43.47794723510742, - "learning_rate": 1.102577777777778e-05, - "loss": 1.6731, - "step": 31490 - }, - { - "epoch": 1.008, - "grad_norm": 44.99798583984375, - "learning_rate": 1.1022222222222224e-05, - "loss": 1.6104, - "step": 31500 - }, - { - "epoch": 1.00832, - "grad_norm": 44.819461822509766, - "learning_rate": 1.1018666666666668e-05, - "loss": 1.6118, - "step": 31510 - }, - { - "epoch": 1.00864, - "grad_norm": 44.44795608520508, - "learning_rate": 1.1015111111111111e-05, - "loss": 1.6252, - "step": 31520 - }, - { - "epoch": 1.00896, - "grad_norm": 45.462520599365234, - "learning_rate": 1.1011555555555557e-05, - "loss": 1.6215, - "step": 31530 - }, - { - "epoch": 1.00928, - "grad_norm": 44.72943878173828, - "learning_rate": 1.1008e-05, - "loss": 1.6085, - "step": 31540 - }, - { - "epoch": 1.0096, - "grad_norm": 42.78067398071289, - "learning_rate": 1.1004444444444445e-05, - "loss": 1.622, - "step": 31550 - }, - { - "epoch": 1.00992, - "grad_norm": 44.57320785522461, - "learning_rate": 1.100088888888889e-05, - "loss": 1.6739, - "step": 31560 - }, - { - "epoch": 1.01024, - "grad_norm": 46.82978439331055, - "learning_rate": 1.0997333333333334e-05, - "loss": 1.6334, - "step": 31570 - }, - { - "epoch": 1.01056, - "grad_norm": 44.21915817260742, - "learning_rate": 1.0993777777777779e-05, - "loss": 1.6436, - "step": 31580 - }, - { - "epoch": 1.01088, - "grad_norm": 44.011409759521484, - "learning_rate": 1.0990222222222221e-05, - "loss": 1.6548, - "step": 31590 - }, - { - "epoch": 1.0112, - "grad_norm": 46.1209716796875, - "learning_rate": 1.0986666666666668e-05, - "loss": 1.6127, - "step": 31600 - }, - { - "epoch": 1.01152, - "grad_norm": 43.016326904296875, - "learning_rate": 1.098311111111111e-05, - "loss": 1.6074, - "step": 31610 - }, - { - "epoch": 1.01184, - "grad_norm": 46.83198928833008, - "learning_rate": 1.0979555555555557e-05, - "loss": 1.6347, - "step": 31620 - }, - { - "epoch": 1.01216, - "grad_norm": 44.099212646484375, - "learning_rate": 1.0976e-05, - "loss": 1.6203, - "step": 31630 - }, - { - "epoch": 1.01248, - "grad_norm": 44.47719955444336, - "learning_rate": 1.0972444444444446e-05, - "loss": 1.6414, - "step": 31640 - }, - { - "epoch": 1.0128, - "grad_norm": 44.10932922363281, - "learning_rate": 1.0968888888888889e-05, - "loss": 1.6289, - "step": 31650 - }, - { - "epoch": 1.01312, - "grad_norm": 42.07052230834961, - "learning_rate": 1.0965333333333335e-05, - "loss": 1.6272, - "step": 31660 - }, - { - "epoch": 1.01344, - "grad_norm": 47.4359130859375, - "learning_rate": 1.0961777777777778e-05, - "loss": 1.6344, - "step": 31670 - }, - { - "epoch": 1.01376, - "grad_norm": 47.179473876953125, - "learning_rate": 1.0958222222222224e-05, - "loss": 1.6272, - "step": 31680 - }, - { - "epoch": 1.01408, - "grad_norm": 43.768314361572266, - "learning_rate": 1.0954666666666667e-05, - "loss": 1.6009, - "step": 31690 - }, - { - "epoch": 1.0144, - "grad_norm": 45.94318771362305, - "learning_rate": 1.0951111111111113e-05, - "loss": 1.6192, - "step": 31700 - }, - { - "epoch": 1.01472, - "grad_norm": 44.83135223388672, - "learning_rate": 1.0947555555555556e-05, - "loss": 1.6039, - "step": 31710 - }, - { - "epoch": 1.01504, - "grad_norm": 45.00038528442383, - "learning_rate": 1.0944e-05, - "loss": 1.6236, - "step": 31720 - }, - { - "epoch": 1.01536, - "grad_norm": 45.91115951538086, - "learning_rate": 1.0940444444444445e-05, - "loss": 1.6407, - "step": 31730 - }, - { - "epoch": 1.01568, - "grad_norm": 43.440773010253906, 
- "learning_rate": 1.093688888888889e-05, - "loss": 1.6389, - "step": 31740 - }, - { - "epoch": 1.016, - "grad_norm": 47.18890380859375, - "learning_rate": 1.0933333333333334e-05, - "loss": 1.6178, - "step": 31750 - }, - { - "epoch": 1.01632, - "grad_norm": 44.28126525878906, - "learning_rate": 1.0929777777777779e-05, - "loss": 1.6278, - "step": 31760 - }, - { - "epoch": 1.01664, - "grad_norm": 43.794429779052734, - "learning_rate": 1.0926222222222222e-05, - "loss": 1.6526, - "step": 31770 - }, - { - "epoch": 1.01696, - "grad_norm": 44.310943603515625, - "learning_rate": 1.0922666666666668e-05, - "loss": 1.6271, - "step": 31780 - }, - { - "epoch": 1.01728, - "grad_norm": 42.87227249145508, - "learning_rate": 1.091911111111111e-05, - "loss": 1.629, - "step": 31790 - }, - { - "epoch": 1.0176, - "grad_norm": 42.844871520996094, - "learning_rate": 1.0915555555555557e-05, - "loss": 1.6304, - "step": 31800 - }, - { - "epoch": 1.01792, - "grad_norm": 45.118019104003906, - "learning_rate": 1.0912e-05, - "loss": 1.6531, - "step": 31810 - }, - { - "epoch": 1.01824, - "grad_norm": 43.47876739501953, - "learning_rate": 1.0908444444444446e-05, - "loss": 1.6381, - "step": 31820 - }, - { - "epoch": 1.01856, - "grad_norm": 43.193843841552734, - "learning_rate": 1.0904888888888889e-05, - "loss": 1.6264, - "step": 31830 - }, - { - "epoch": 1.01888, - "grad_norm": 42.943599700927734, - "learning_rate": 1.0901333333333335e-05, - "loss": 1.6012, - "step": 31840 - }, - { - "epoch": 1.0192, - "grad_norm": 45.247562408447266, - "learning_rate": 1.0897777777777778e-05, - "loss": 1.6206, - "step": 31850 - }, - { - "epoch": 1.01952, - "grad_norm": 44.1955680847168, - "learning_rate": 1.0894222222222224e-05, - "loss": 1.6449, - "step": 31860 - }, - { - "epoch": 1.01984, - "grad_norm": 48.465232849121094, - "learning_rate": 1.0890666666666667e-05, - "loss": 1.6447, - "step": 31870 - }, - { - "epoch": 1.02016, - "grad_norm": 43.41231918334961, - "learning_rate": 1.0887111111111113e-05, - "loss": 1.6501, - "step": 31880 - }, - { - "epoch": 1.02048, - "grad_norm": 46.5025520324707, - "learning_rate": 1.0883555555555556e-05, - "loss": 1.6259, - "step": 31890 - }, - { - "epoch": 1.0208, - "grad_norm": 45.25193786621094, - "learning_rate": 1.0880000000000001e-05, - "loss": 1.5989, - "step": 31900 - }, - { - "epoch": 1.02112, - "grad_norm": 45.444549560546875, - "learning_rate": 1.0876444444444445e-05, - "loss": 1.6214, - "step": 31910 - }, - { - "epoch": 1.02144, - "grad_norm": 44.449981689453125, - "learning_rate": 1.087288888888889e-05, - "loss": 1.6275, - "step": 31920 - }, - { - "epoch": 1.02176, - "grad_norm": 43.29909896850586, - "learning_rate": 1.0869333333333335e-05, - "loss": 1.603, - "step": 31930 - }, - { - "epoch": 1.02208, - "grad_norm": 43.329078674316406, - "learning_rate": 1.0865777777777779e-05, - "loss": 1.646, - "step": 31940 - }, - { - "epoch": 1.0224, - "grad_norm": 43.6357421875, - "learning_rate": 1.0862222222222224e-05, - "loss": 1.654, - "step": 31950 - }, - { - "epoch": 1.02272, - "grad_norm": 44.786800384521484, - "learning_rate": 1.0858666666666668e-05, - "loss": 1.6129, - "step": 31960 - }, - { - "epoch": 1.02304, - "grad_norm": 44.97706604003906, - "learning_rate": 1.0855111111111111e-05, - "loss": 1.6147, - "step": 31970 - }, - { - "epoch": 1.02336, - "grad_norm": 44.8741340637207, - "learning_rate": 1.0851555555555557e-05, - "loss": 1.6381, - "step": 31980 - }, - { - "epoch": 1.02368, - "grad_norm": 45.01078796386719, - "learning_rate": 1.0848e-05, - "loss": 1.6143, - "step": 31990 - }, - { 
- "epoch": 1.024, - "grad_norm": 44.32587814331055, - "learning_rate": 1.0844444444444446e-05, - "loss": 1.6145, - "step": 32000 - }, - { - "epoch": 1.02432, - "grad_norm": 44.67451477050781, - "learning_rate": 1.084088888888889e-05, - "loss": 1.6423, - "step": 32010 - }, - { - "epoch": 1.02464, - "grad_norm": 43.36486053466797, - "learning_rate": 1.0837333333333336e-05, - "loss": 1.6254, - "step": 32020 - }, - { - "epoch": 1.02496, - "grad_norm": 44.7286376953125, - "learning_rate": 1.0833777777777778e-05, - "loss": 1.6588, - "step": 32030 - }, - { - "epoch": 1.02528, - "grad_norm": 42.48271560668945, - "learning_rate": 1.0830222222222225e-05, - "loss": 1.6242, - "step": 32040 - }, - { - "epoch": 1.0256, - "grad_norm": 44.96989440917969, - "learning_rate": 1.0826666666666667e-05, - "loss": 1.6284, - "step": 32050 - }, - { - "epoch": 1.02592, - "grad_norm": 42.45598220825195, - "learning_rate": 1.082311111111111e-05, - "loss": 1.6255, - "step": 32060 - }, - { - "epoch": 1.02624, - "grad_norm": 44.257591247558594, - "learning_rate": 1.0819555555555557e-05, - "loss": 1.6006, - "step": 32070 - }, - { - "epoch": 1.02656, - "grad_norm": 43.706756591796875, - "learning_rate": 1.0816e-05, - "loss": 1.6175, - "step": 32080 - }, - { - "epoch": 1.02688, - "grad_norm": 43.40682601928711, - "learning_rate": 1.0812444444444446e-05, - "loss": 1.6196, - "step": 32090 - }, - { - "epoch": 1.0272, - "grad_norm": 46.219940185546875, - "learning_rate": 1.0808888888888889e-05, - "loss": 1.6259, - "step": 32100 - }, - { - "epoch": 1.02752, - "grad_norm": 43.60252380371094, - "learning_rate": 1.0805333333333335e-05, - "loss": 1.6402, - "step": 32110 - }, - { - "epoch": 1.02784, - "grad_norm": 43.27579879760742, - "learning_rate": 1.0801777777777778e-05, - "loss": 1.6249, - "step": 32120 - }, - { - "epoch": 1.02816, - "grad_norm": 46.17375946044922, - "learning_rate": 1.0798222222222224e-05, - "loss": 1.6575, - "step": 32130 - }, - { - "epoch": 1.02848, - "grad_norm": 44.743988037109375, - "learning_rate": 1.0794666666666667e-05, - "loss": 1.6429, - "step": 32140 - }, - { - "epoch": 1.0288, - "grad_norm": 44.23445510864258, - "learning_rate": 1.0791111111111113e-05, - "loss": 1.6322, - "step": 32150 - }, - { - "epoch": 1.02912, - "grad_norm": 41.932987213134766, - "learning_rate": 1.0787555555555556e-05, - "loss": 1.6479, - "step": 32160 - }, - { - "epoch": 1.02944, - "grad_norm": 42.20267105102539, - "learning_rate": 1.0784e-05, - "loss": 1.6139, - "step": 32170 - }, - { - "epoch": 1.02976, - "grad_norm": 44.15998077392578, - "learning_rate": 1.0780444444444445e-05, - "loss": 1.6416, - "step": 32180 - }, - { - "epoch": 1.03008, - "grad_norm": 45.692970275878906, - "learning_rate": 1.077688888888889e-05, - "loss": 1.637, - "step": 32190 - }, - { - "epoch": 1.0304, - "grad_norm": 45.96401596069336, - "learning_rate": 1.0773333333333334e-05, - "loss": 1.6245, - "step": 32200 - }, - { - "epoch": 1.03072, - "grad_norm": 43.48420333862305, - "learning_rate": 1.0769777777777779e-05, - "loss": 1.6248, - "step": 32210 - }, - { - "epoch": 1.03104, - "grad_norm": 44.58662414550781, - "learning_rate": 1.0766222222222223e-05, - "loss": 1.6376, - "step": 32220 - }, - { - "epoch": 1.03136, - "grad_norm": 44.91218948364258, - "learning_rate": 1.0762666666666668e-05, - "loss": 1.6193, - "step": 32230 - }, - { - "epoch": 1.03168, - "grad_norm": 43.115234375, - "learning_rate": 1.075911111111111e-05, - "loss": 1.6597, - "step": 32240 - }, - { - "epoch": 1.032, - "grad_norm": 46.89992141723633, - "learning_rate": 
1.0755555555555557e-05, - "loss": 1.6303, - "step": 32250 - }, - { - "epoch": 1.03232, - "grad_norm": 42.8564453125, - "learning_rate": 1.0752e-05, - "loss": 1.604, - "step": 32260 - }, - { - "epoch": 1.03264, - "grad_norm": 44.03951644897461, - "learning_rate": 1.0748444444444446e-05, - "loss": 1.5788, - "step": 32270 - }, - { - "epoch": 1.03296, - "grad_norm": 45.001678466796875, - "learning_rate": 1.0744888888888889e-05, - "loss": 1.6211, - "step": 32280 - }, - { - "epoch": 1.03328, - "grad_norm": 44.68024444580078, - "learning_rate": 1.0741333333333335e-05, - "loss": 1.6518, - "step": 32290 - }, - { - "epoch": 1.0336, - "grad_norm": 44.97967529296875, - "learning_rate": 1.0737777777777778e-05, - "loss": 1.6354, - "step": 32300 - }, - { - "epoch": 1.03392, - "grad_norm": 45.28949737548828, - "learning_rate": 1.0734222222222224e-05, - "loss": 1.6284, - "step": 32310 - }, - { - "epoch": 1.03424, - "grad_norm": 48.3412971496582, - "learning_rate": 1.0730666666666667e-05, - "loss": 1.654, - "step": 32320 - }, - { - "epoch": 1.03456, - "grad_norm": 45.43934631347656, - "learning_rate": 1.0727111111111113e-05, - "loss": 1.6313, - "step": 32330 - }, - { - "epoch": 1.03488, - "grad_norm": 44.49418640136719, - "learning_rate": 1.0723555555555556e-05, - "loss": 1.6009, - "step": 32340 - }, - { - "epoch": 1.0352, - "grad_norm": 43.252567291259766, - "learning_rate": 1.072e-05, - "loss": 1.6191, - "step": 32350 - }, - { - "epoch": 1.03552, - "grad_norm": 43.64153289794922, - "learning_rate": 1.0716444444444445e-05, - "loss": 1.6482, - "step": 32360 - }, - { - "epoch": 1.03584, - "grad_norm": 42.04774856567383, - "learning_rate": 1.071288888888889e-05, - "loss": 1.6464, - "step": 32370 - }, - { - "epoch": 1.03616, - "grad_norm": 44.96035385131836, - "learning_rate": 1.0709333333333334e-05, - "loss": 1.6204, - "step": 32380 - }, - { - "epoch": 1.03648, - "grad_norm": 44.43547058105469, - "learning_rate": 1.0705777777777779e-05, - "loss": 1.6563, - "step": 32390 - }, - { - "epoch": 1.0368, - "grad_norm": 44.755558013916016, - "learning_rate": 1.0702222222222223e-05, - "loss": 1.6514, - "step": 32400 - }, - { - "epoch": 1.03712, - "grad_norm": 43.75396728515625, - "learning_rate": 1.0698666666666668e-05, - "loss": 1.6043, - "step": 32410 - }, - { - "epoch": 1.03744, - "grad_norm": 46.955711364746094, - "learning_rate": 1.0695111111111111e-05, - "loss": 1.6138, - "step": 32420 - }, - { - "epoch": 1.03776, - "grad_norm": 44.48612594604492, - "learning_rate": 1.0691555555555557e-05, - "loss": 1.6539, - "step": 32430 - }, - { - "epoch": 1.03808, - "grad_norm": 45.34321594238281, - "learning_rate": 1.0688e-05, - "loss": 1.6247, - "step": 32440 - }, - { - "epoch": 1.0384, - "grad_norm": 42.34244155883789, - "learning_rate": 1.0684444444444446e-05, - "loss": 1.6117, - "step": 32450 - }, - { - "epoch": 1.03872, - "grad_norm": 44.84130096435547, - "learning_rate": 1.0680888888888889e-05, - "loss": 1.6138, - "step": 32460 - }, - { - "epoch": 1.03904, - "grad_norm": 45.60774612426758, - "learning_rate": 1.0677333333333335e-05, - "loss": 1.6179, - "step": 32470 - }, - { - "epoch": 1.03936, - "grad_norm": 42.9847412109375, - "learning_rate": 1.0673777777777778e-05, - "loss": 1.6525, - "step": 32480 - }, - { - "epoch": 1.03968, - "grad_norm": 43.739463806152344, - "learning_rate": 1.0670222222222224e-05, - "loss": 1.6358, - "step": 32490 - }, - { - "epoch": 1.04, - "grad_norm": 44.78418731689453, - "learning_rate": 1.0666666666666667e-05, - "loss": 1.6449, - "step": 32500 - }, - { - "epoch": 1.04032, - "grad_norm": 
42.9957275390625, - "learning_rate": 1.0663111111111114e-05, - "loss": 1.6481, - "step": 32510 - }, - { - "epoch": 1.04064, - "grad_norm": 42.810630798339844, - "learning_rate": 1.0659555555555556e-05, - "loss": 1.6158, - "step": 32520 - }, - { - "epoch": 1.04096, - "grad_norm": 44.490386962890625, - "learning_rate": 1.0656000000000003e-05, - "loss": 1.6086, - "step": 32530 - }, - { - "epoch": 1.04128, - "grad_norm": 44.92964172363281, - "learning_rate": 1.0652444444444446e-05, - "loss": 1.6429, - "step": 32540 - }, - { - "epoch": 1.0416, - "grad_norm": 44.007816314697266, - "learning_rate": 1.064888888888889e-05, - "loss": 1.648, - "step": 32550 - }, - { - "epoch": 1.04192, - "grad_norm": 44.374534606933594, - "learning_rate": 1.0645333333333335e-05, - "loss": 1.6251, - "step": 32560 - }, - { - "epoch": 1.04224, - "grad_norm": 44.10312271118164, - "learning_rate": 1.0641777777777777e-05, - "loss": 1.6235, - "step": 32570 - }, - { - "epoch": 1.04256, - "grad_norm": 43.174400329589844, - "learning_rate": 1.0638222222222224e-05, - "loss": 1.6138, - "step": 32580 - }, - { - "epoch": 1.04288, - "grad_norm": 43.963924407958984, - "learning_rate": 1.0634666666666667e-05, - "loss": 1.6331, - "step": 32590 - }, - { - "epoch": 1.0432, - "grad_norm": 45.72218322753906, - "learning_rate": 1.0631111111111113e-05, - "loss": 1.6469, - "step": 32600 - }, - { - "epoch": 1.04352, - "grad_norm": 44.228797912597656, - "learning_rate": 1.0627555555555556e-05, - "loss": 1.623, - "step": 32610 - }, - { - "epoch": 1.04384, - "grad_norm": 43.118465423583984, - "learning_rate": 1.0624e-05, - "loss": 1.6266, - "step": 32620 - }, - { - "epoch": 1.04416, - "grad_norm": 44.9131965637207, - "learning_rate": 1.0620444444444445e-05, - "loss": 1.6052, - "step": 32630 - }, - { - "epoch": 1.04448, - "grad_norm": 41.5771369934082, - "learning_rate": 1.061688888888889e-05, - "loss": 1.6338, - "step": 32640 - }, - { - "epoch": 1.0448, - "grad_norm": 43.707706451416016, - "learning_rate": 1.0613333333333334e-05, - "loss": 1.6444, - "step": 32650 - }, - { - "epoch": 1.04512, - "grad_norm": 43.63424301147461, - "learning_rate": 1.0609777777777778e-05, - "loss": 1.6122, - "step": 32660 - }, - { - "epoch": 1.04544, - "grad_norm": 43.490196228027344, - "learning_rate": 1.0606222222222223e-05, - "loss": 1.5986, - "step": 32670 - }, - { - "epoch": 1.04576, - "grad_norm": 44.29290771484375, - "learning_rate": 1.0602666666666668e-05, - "loss": 1.6355, - "step": 32680 - }, - { - "epoch": 1.04608, - "grad_norm": 46.40851974487305, - "learning_rate": 1.059911111111111e-05, - "loss": 1.6291, - "step": 32690 - }, - { - "epoch": 1.0464, - "grad_norm": 43.00209426879883, - "learning_rate": 1.0595555555555557e-05, - "loss": 1.6312, - "step": 32700 - }, - { - "epoch": 1.04672, - "grad_norm": 46.49836349487305, - "learning_rate": 1.0592e-05, - "loss": 1.6316, - "step": 32710 - }, - { - "epoch": 1.04704, - "grad_norm": 43.06816482543945, - "learning_rate": 1.0588444444444446e-05, - "loss": 1.6084, - "step": 32720 - }, - { - "epoch": 1.04736, - "grad_norm": 44.20458221435547, - "learning_rate": 1.0584888888888889e-05, - "loss": 1.6038, - "step": 32730 - }, - { - "epoch": 1.04768, - "grad_norm": 45.879676818847656, - "learning_rate": 1.0581333333333335e-05, - "loss": 1.6372, - "step": 32740 - }, - { - "epoch": 1.048, - "grad_norm": 44.341094970703125, - "learning_rate": 1.0577777777777778e-05, - "loss": 1.639, - "step": 32750 - }, - { - "epoch": 1.04832, - "grad_norm": 43.37816619873047, - "learning_rate": 1.0574222222222224e-05, - "loss": 1.654, - 
"step": 32760 - }, - { - "epoch": 1.04864, - "grad_norm": 43.5418586730957, - "learning_rate": 1.0570666666666667e-05, - "loss": 1.6361, - "step": 32770 - }, - { - "epoch": 1.04896, - "grad_norm": 46.33805847167969, - "learning_rate": 1.0567111111111113e-05, - "loss": 1.6388, - "step": 32780 - }, - { - "epoch": 1.04928, - "grad_norm": 42.2393913269043, - "learning_rate": 1.0563555555555556e-05, - "loss": 1.6152, - "step": 32790 - }, - { - "epoch": 1.0496, - "grad_norm": 45.337345123291016, - "learning_rate": 1.056e-05, - "loss": 1.6162, - "step": 32800 - }, - { - "epoch": 1.04992, - "grad_norm": 44.33757781982422, - "learning_rate": 1.0556444444444445e-05, - "loss": 1.6181, - "step": 32810 - }, - { - "epoch": 1.05024, - "grad_norm": 43.743751525878906, - "learning_rate": 1.055288888888889e-05, - "loss": 1.6245, - "step": 32820 - }, - { - "epoch": 1.05056, - "grad_norm": 44.40175247192383, - "learning_rate": 1.0549333333333334e-05, - "loss": 1.6206, - "step": 32830 - }, - { - "epoch": 1.05088, - "grad_norm": 45.282310485839844, - "learning_rate": 1.0545777777777779e-05, - "loss": 1.6371, - "step": 32840 - }, - { - "epoch": 1.0512, - "grad_norm": 44.07647705078125, - "learning_rate": 1.0542222222222223e-05, - "loss": 1.6001, - "step": 32850 - }, - { - "epoch": 1.05152, - "grad_norm": 42.87550735473633, - "learning_rate": 1.0538666666666668e-05, - "loss": 1.6511, - "step": 32860 - }, - { - "epoch": 1.0518399999999999, - "grad_norm": 44.68036651611328, - "learning_rate": 1.053511111111111e-05, - "loss": 1.6109, - "step": 32870 - }, - { - "epoch": 1.05216, - "grad_norm": 45.50474548339844, - "learning_rate": 1.0531555555555557e-05, - "loss": 1.6316, - "step": 32880 - }, - { - "epoch": 1.05248, - "grad_norm": 45.01093673706055, - "learning_rate": 1.0528e-05, - "loss": 1.6129, - "step": 32890 - }, - { - "epoch": 1.0528, - "grad_norm": 42.464080810546875, - "learning_rate": 1.0524444444444446e-05, - "loss": 1.6246, - "step": 32900 - }, - { - "epoch": 1.05312, - "grad_norm": 44.48116683959961, - "learning_rate": 1.0520888888888889e-05, - "loss": 1.6577, - "step": 32910 - }, - { - "epoch": 1.05344, - "grad_norm": 44.605037689208984, - "learning_rate": 1.0517333333333335e-05, - "loss": 1.654, - "step": 32920 - }, - { - "epoch": 1.05376, - "grad_norm": 43.643428802490234, - "learning_rate": 1.0513777777777778e-05, - "loss": 1.6288, - "step": 32930 - }, - { - "epoch": 1.05408, - "grad_norm": 42.4962272644043, - "learning_rate": 1.0510222222222224e-05, - "loss": 1.6439, - "step": 32940 - }, - { - "epoch": 1.0544, - "grad_norm": 44.38206481933594, - "learning_rate": 1.0506666666666667e-05, - "loss": 1.6318, - "step": 32950 - }, - { - "epoch": 1.05472, - "grad_norm": 42.56404113769531, - "learning_rate": 1.0503111111111113e-05, - "loss": 1.6332, - "step": 32960 - }, - { - "epoch": 1.05504, - "grad_norm": 47.60989761352539, - "learning_rate": 1.0499555555555556e-05, - "loss": 1.6258, - "step": 32970 - }, - { - "epoch": 1.05536, - "grad_norm": 47.522117614746094, - "learning_rate": 1.0496000000000003e-05, - "loss": 1.621, - "step": 32980 - }, - { - "epoch": 1.05568, - "grad_norm": 44.03647232055664, - "learning_rate": 1.0492444444444445e-05, - "loss": 1.6203, - "step": 32990 - }, - { - "epoch": 1.056, - "grad_norm": 42.845619201660156, - "learning_rate": 1.048888888888889e-05, - "loss": 1.5946, - "step": 33000 - }, - { - "epoch": 1.05632, - "grad_norm": 46.014015197753906, - "learning_rate": 1.0485333333333334e-05, - "loss": 1.6354, - "step": 33010 - }, - { - "epoch": 1.05664, - "grad_norm": 
45.520111083984375, - "learning_rate": 1.0481777777777779e-05, - "loss": 1.6023, - "step": 33020 - }, - { - "epoch": 1.05696, - "grad_norm": 43.73442840576172, - "learning_rate": 1.0478222222222224e-05, - "loss": 1.6684, - "step": 33030 - }, - { - "epoch": 1.05728, - "grad_norm": 46.191837310791016, - "learning_rate": 1.0474666666666668e-05, - "loss": 1.6118, - "step": 33040 - }, - { - "epoch": 1.0576, - "grad_norm": 46.10258483886719, - "learning_rate": 1.0471111111111113e-05, - "loss": 1.6706, - "step": 33050 - }, - { - "epoch": 1.05792, - "grad_norm": 42.99847412109375, - "learning_rate": 1.0467555555555557e-05, - "loss": 1.6016, - "step": 33060 - }, - { - "epoch": 1.05824, - "grad_norm": 46.8404655456543, - "learning_rate": 1.0464e-05, - "loss": 1.6362, - "step": 33070 - }, - { - "epoch": 1.05856, - "grad_norm": 44.80493927001953, - "learning_rate": 1.0460444444444446e-05, - "loss": 1.644, - "step": 33080 - }, - { - "epoch": 1.05888, - "grad_norm": 45.339271545410156, - "learning_rate": 1.045688888888889e-05, - "loss": 1.6411, - "step": 33090 - }, - { - "epoch": 1.0592, - "grad_norm": 44.27571105957031, - "learning_rate": 1.0453333333333334e-05, - "loss": 1.6183, - "step": 33100 - }, - { - "epoch": 1.05952, - "grad_norm": 45.33119583129883, - "learning_rate": 1.0449777777777778e-05, - "loss": 1.6413, - "step": 33110 - }, - { - "epoch": 1.05984, - "grad_norm": 45.91305160522461, - "learning_rate": 1.0446222222222223e-05, - "loss": 1.6397, - "step": 33120 - }, - { - "epoch": 1.06016, - "grad_norm": 43.498374938964844, - "learning_rate": 1.0442666666666667e-05, - "loss": 1.5971, - "step": 33130 - }, - { - "epoch": 1.06048, - "grad_norm": 44.306419372558594, - "learning_rate": 1.043911111111111e-05, - "loss": 1.6197, - "step": 33140 - }, - { - "epoch": 1.0608, - "grad_norm": 44.896297454833984, - "learning_rate": 1.0435555555555557e-05, - "loss": 1.6165, - "step": 33150 - }, - { - "epoch": 1.06112, - "grad_norm": 45.88273620605469, - "learning_rate": 1.0432e-05, - "loss": 1.6174, - "step": 33160 - }, - { - "epoch": 1.06144, - "grad_norm": 45.505531311035156, - "learning_rate": 1.0428444444444446e-05, - "loss": 1.6441, - "step": 33170 - }, - { - "epoch": 1.06176, - "grad_norm": 46.69767761230469, - "learning_rate": 1.0424888888888888e-05, - "loss": 1.6241, - "step": 33180 - }, - { - "epoch": 1.06208, - "grad_norm": 43.01450729370117, - "learning_rate": 1.0421333333333335e-05, - "loss": 1.6151, - "step": 33190 - }, - { - "epoch": 1.0624, - "grad_norm": 44.89165115356445, - "learning_rate": 1.0417777777777778e-05, - "loss": 1.6233, - "step": 33200 - }, - { - "epoch": 1.06272, - "grad_norm": 44.70990753173828, - "learning_rate": 1.0414222222222224e-05, - "loss": 1.6288, - "step": 33210 - }, - { - "epoch": 1.06304, - "grad_norm": 42.99964141845703, - "learning_rate": 1.0410666666666667e-05, - "loss": 1.655, - "step": 33220 - }, - { - "epoch": 1.06336, - "grad_norm": 42.71984100341797, - "learning_rate": 1.0407111111111113e-05, - "loss": 1.6225, - "step": 33230 - }, - { - "epoch": 1.06368, - "grad_norm": 43.9577751159668, - "learning_rate": 1.0403555555555556e-05, - "loss": 1.5939, - "step": 33240 - }, - { - "epoch": 1.064, - "grad_norm": 43.84825134277344, - "learning_rate": 1.04e-05, - "loss": 1.6543, - "step": 33250 - }, - { - "epoch": 1.06432, - "grad_norm": 44.09382247924805, - "learning_rate": 1.0396444444444445e-05, - "loss": 1.6217, - "step": 33260 - }, - { - "epoch": 1.06464, - "grad_norm": 47.45444869995117, - "learning_rate": 1.039288888888889e-05, - "loss": 1.6282, - "step": 33270 - 
}, - { - "epoch": 1.06496, - "grad_norm": 43.27195739746094, - "learning_rate": 1.0389333333333334e-05, - "loss": 1.6359, - "step": 33280 - }, - { - "epoch": 1.06528, - "grad_norm": 44.726478576660156, - "learning_rate": 1.0385777777777779e-05, - "loss": 1.6212, - "step": 33290 - }, - { - "epoch": 1.0656, - "grad_norm": 45.426143646240234, - "learning_rate": 1.0382222222222223e-05, - "loss": 1.6219, - "step": 33300 - }, - { - "epoch": 1.06592, - "grad_norm": 45.67955780029297, - "learning_rate": 1.0378666666666668e-05, - "loss": 1.6726, - "step": 33310 - }, - { - "epoch": 1.06624, - "grad_norm": 44.08571243286133, - "learning_rate": 1.037511111111111e-05, - "loss": 1.6149, - "step": 33320 - }, - { - "epoch": 1.06656, - "grad_norm": 45.839317321777344, - "learning_rate": 1.0371555555555557e-05, - "loss": 1.6193, - "step": 33330 - }, - { - "epoch": 1.06688, - "grad_norm": 43.84119415283203, - "learning_rate": 1.0368e-05, - "loss": 1.6141, - "step": 33340 - }, - { - "epoch": 1.0672, - "grad_norm": 43.55615997314453, - "learning_rate": 1.0364444444444446e-05, - "loss": 1.6403, - "step": 33350 - }, - { - "epoch": 1.06752, - "grad_norm": 43.03118133544922, - "learning_rate": 1.0360888888888889e-05, - "loss": 1.6034, - "step": 33360 - }, - { - "epoch": 1.06784, - "grad_norm": 43.8675422668457, - "learning_rate": 1.0357333333333335e-05, - "loss": 1.6393, - "step": 33370 - }, - { - "epoch": 1.06816, - "grad_norm": 43.28858947753906, - "learning_rate": 1.0353777777777778e-05, - "loss": 1.6148, - "step": 33380 - }, - { - "epoch": 1.06848, - "grad_norm": 44.096370697021484, - "learning_rate": 1.0350222222222224e-05, - "loss": 1.6145, - "step": 33390 - }, - { - "epoch": 1.0688, - "grad_norm": 42.880043029785156, - "learning_rate": 1.0346666666666667e-05, - "loss": 1.6401, - "step": 33400 - }, - { - "epoch": 1.06912, - "grad_norm": 42.06776428222656, - "learning_rate": 1.0343111111111113e-05, - "loss": 1.6416, - "step": 33410 - }, - { - "epoch": 1.06944, - "grad_norm": 44.670833587646484, - "learning_rate": 1.0339555555555556e-05, - "loss": 1.6126, - "step": 33420 - }, - { - "epoch": 1.06976, - "grad_norm": 44.110321044921875, - "learning_rate": 1.0336000000000002e-05, - "loss": 1.649, - "step": 33430 - }, - { - "epoch": 1.07008, - "grad_norm": 47.0807991027832, - "learning_rate": 1.0332444444444445e-05, - "loss": 1.637, - "step": 33440 - }, - { - "epoch": 1.0704, - "grad_norm": 45.72868728637695, - "learning_rate": 1.032888888888889e-05, - "loss": 1.6213, - "step": 33450 - }, - { - "epoch": 1.0707200000000001, - "grad_norm": 45.216796875, - "learning_rate": 1.0325333333333334e-05, - "loss": 1.6272, - "step": 33460 - }, - { - "epoch": 1.07104, - "grad_norm": 44.5551872253418, - "learning_rate": 1.0321777777777779e-05, - "loss": 1.6565, - "step": 33470 - }, - { - "epoch": 1.07136, - "grad_norm": 43.27811813354492, - "learning_rate": 1.0318222222222223e-05, - "loss": 1.6362, - "step": 33480 - }, - { - "epoch": 1.07168, - "grad_norm": 44.5125732421875, - "learning_rate": 1.0314666666666668e-05, - "loss": 1.6424, - "step": 33490 - }, - { - "epoch": 1.072, - "grad_norm": 44.89219284057617, - "learning_rate": 1.0311111111111113e-05, - "loss": 1.6211, - "step": 33500 - }, - { - "epoch": 1.07232, - "grad_norm": 43.710697174072266, - "learning_rate": 1.0307555555555557e-05, - "loss": 1.6141, - "step": 33510 - }, - { - "epoch": 1.07264, - "grad_norm": 42.3712158203125, - "learning_rate": 1.0304e-05, - "loss": 1.5936, - "step": 33520 - }, - { - "epoch": 1.07296, - "grad_norm": 44.5188102722168, - "learning_rate": 
1.0300444444444446e-05, - "loss": 1.6056, - "step": 33530 - }, - { - "epoch": 1.07328, - "grad_norm": 44.64246368408203, - "learning_rate": 1.0296888888888889e-05, - "loss": 1.6199, - "step": 33540 - }, - { - "epoch": 1.0735999999999999, - "grad_norm": 45.309329986572266, - "learning_rate": 1.0293333333333335e-05, - "loss": 1.6208, - "step": 33550 - }, - { - "epoch": 1.07392, - "grad_norm": 43.31216049194336, - "learning_rate": 1.0289777777777778e-05, - "loss": 1.6197, - "step": 33560 - }, - { - "epoch": 1.07424, - "grad_norm": 43.84577560424805, - "learning_rate": 1.0286222222222224e-05, - "loss": 1.6484, - "step": 33570 - }, - { - "epoch": 1.07456, - "grad_norm": 43.01450729370117, - "learning_rate": 1.0282666666666667e-05, - "loss": 1.6277, - "step": 33580 - }, - { - "epoch": 1.07488, - "grad_norm": 44.55772018432617, - "learning_rate": 1.0279111111111114e-05, - "loss": 1.6333, - "step": 33590 - }, - { - "epoch": 1.0752, - "grad_norm": 43.11758804321289, - "learning_rate": 1.0275555555555556e-05, - "loss": 1.63, - "step": 33600 - }, - { - "epoch": 1.07552, - "grad_norm": 42.40789794921875, - "learning_rate": 1.0272e-05, - "loss": 1.6005, - "step": 33610 - }, - { - "epoch": 1.07584, - "grad_norm": 43.52560043334961, - "learning_rate": 1.0268444444444445e-05, - "loss": 1.6346, - "step": 33620 - }, - { - "epoch": 1.07616, - "grad_norm": 42.63965606689453, - "learning_rate": 1.0264888888888888e-05, - "loss": 1.6006, - "step": 33630 - }, - { - "epoch": 1.07648, - "grad_norm": 44.4262809753418, - "learning_rate": 1.0261333333333335e-05, - "loss": 1.591, - "step": 33640 - }, - { - "epoch": 1.0768, - "grad_norm": 43.22575759887695, - "learning_rate": 1.0257777777777777e-05, - "loss": 1.6324, - "step": 33650 - }, - { - "epoch": 1.07712, - "grad_norm": 44.486385345458984, - "learning_rate": 1.0254222222222224e-05, - "loss": 1.5864, - "step": 33660 - }, - { - "epoch": 1.07744, - "grad_norm": 44.114166259765625, - "learning_rate": 1.0250666666666667e-05, - "loss": 1.6076, - "step": 33670 - }, - { - "epoch": 1.07776, - "grad_norm": 44.422061920166016, - "learning_rate": 1.0247111111111113e-05, - "loss": 1.6249, - "step": 33680 - }, - { - "epoch": 1.07808, - "grad_norm": 44.38449478149414, - "learning_rate": 1.0243555555555556e-05, - "loss": 1.5697, - "step": 33690 - }, - { - "epoch": 1.0784, - "grad_norm": 43.65522766113281, - "learning_rate": 1.024e-05, - "loss": 1.6384, - "step": 33700 - }, - { - "epoch": 1.07872, - "grad_norm": 45.55866241455078, - "learning_rate": 1.0236444444444445e-05, - "loss": 1.6395, - "step": 33710 - }, - { - "epoch": 1.07904, - "grad_norm": 44.74530792236328, - "learning_rate": 1.023288888888889e-05, - "loss": 1.6478, - "step": 33720 - }, - { - "epoch": 1.07936, - "grad_norm": 45.45027542114258, - "learning_rate": 1.0229333333333334e-05, - "loss": 1.6378, - "step": 33730 - }, - { - "epoch": 1.07968, - "grad_norm": 44.05388641357422, - "learning_rate": 1.0225777777777778e-05, - "loss": 1.6558, - "step": 33740 - }, - { - "epoch": 1.08, - "grad_norm": 44.426246643066406, - "learning_rate": 1.0222222222222223e-05, - "loss": 1.6432, - "step": 33750 - }, - { - "epoch": 1.08032, - "grad_norm": 45.299373626708984, - "learning_rate": 1.0218666666666668e-05, - "loss": 1.6445, - "step": 33760 - }, - { - "epoch": 1.08064, - "grad_norm": 45.54659652709961, - "learning_rate": 1.021511111111111e-05, - "loss": 1.6199, - "step": 33770 - }, - { - "epoch": 1.08096, - "grad_norm": 43.4600830078125, - "learning_rate": 1.0211555555555557e-05, - "loss": 1.6035, - "step": 33780 - }, - { - 
"epoch": 1.08128, - "grad_norm": 45.97517395019531, - "learning_rate": 1.0208e-05, - "loss": 1.6512, - "step": 33790 - }, - { - "epoch": 1.0816, - "grad_norm": 42.87276077270508, - "learning_rate": 1.0204444444444446e-05, - "loss": 1.6324, - "step": 33800 - }, - { - "epoch": 1.08192, - "grad_norm": 45.31087112426758, - "learning_rate": 1.0200888888888889e-05, - "loss": 1.6516, - "step": 33810 - }, - { - "epoch": 1.08224, - "grad_norm": 43.140838623046875, - "learning_rate": 1.0197333333333335e-05, - "loss": 1.6259, - "step": 33820 - }, - { - "epoch": 1.08256, - "grad_norm": 44.9212646484375, - "learning_rate": 1.0193777777777778e-05, - "loss": 1.6139, - "step": 33830 - }, - { - "epoch": 1.08288, - "grad_norm": 45.999183654785156, - "learning_rate": 1.0190222222222224e-05, - "loss": 1.6218, - "step": 33840 - }, - { - "epoch": 1.0832, - "grad_norm": 42.98609161376953, - "learning_rate": 1.0186666666666667e-05, - "loss": 1.6599, - "step": 33850 - }, - { - "epoch": 1.08352, - "grad_norm": 45.387821197509766, - "learning_rate": 1.0183111111111113e-05, - "loss": 1.6251, - "step": 33860 - }, - { - "epoch": 1.08384, - "grad_norm": 44.79252624511719, - "learning_rate": 1.0179555555555556e-05, - "loss": 1.596, - "step": 33870 - }, - { - "epoch": 1.08416, - "grad_norm": 45.170902252197266, - "learning_rate": 1.0176000000000002e-05, - "loss": 1.6124, - "step": 33880 - }, - { - "epoch": 1.08448, - "grad_norm": 43.105857849121094, - "learning_rate": 1.0172444444444445e-05, - "loss": 1.6146, - "step": 33890 - }, - { - "epoch": 1.0848, - "grad_norm": 46.16950225830078, - "learning_rate": 1.016888888888889e-05, - "loss": 1.6163, - "step": 33900 - }, - { - "epoch": 1.08512, - "grad_norm": 43.8795051574707, - "learning_rate": 1.0165333333333334e-05, - "loss": 1.6246, - "step": 33910 - }, - { - "epoch": 1.08544, - "grad_norm": 44.4942626953125, - "learning_rate": 1.0161777777777779e-05, - "loss": 1.629, - "step": 33920 - }, - { - "epoch": 1.08576, - "grad_norm": 44.03514099121094, - "learning_rate": 1.0158222222222223e-05, - "loss": 1.6344, - "step": 33930 - }, - { - "epoch": 1.08608, - "grad_norm": 43.44086837768555, - "learning_rate": 1.0154666666666668e-05, - "loss": 1.6399, - "step": 33940 - }, - { - "epoch": 1.0864, - "grad_norm": 44.283836364746094, - "learning_rate": 1.0151111111111112e-05, - "loss": 1.6237, - "step": 33950 - }, - { - "epoch": 1.08672, - "grad_norm": 42.35454559326172, - "learning_rate": 1.0147555555555557e-05, - "loss": 1.6246, - "step": 33960 - }, - { - "epoch": 1.08704, - "grad_norm": 42.41131591796875, - "learning_rate": 1.0144e-05, - "loss": 1.6454, - "step": 33970 - }, - { - "epoch": 1.0873599999999999, - "grad_norm": 46.35525894165039, - "learning_rate": 1.0140444444444446e-05, - "loss": 1.6325, - "step": 33980 - }, - { - "epoch": 1.08768, - "grad_norm": 44.864994049072266, - "learning_rate": 1.0136888888888889e-05, - "loss": 1.6092, - "step": 33990 - }, - { - "epoch": 1.088, - "grad_norm": 43.37247085571289, - "learning_rate": 1.0133333333333335e-05, - "loss": 1.6148, - "step": 34000 - }, - { - "epoch": 1.08832, - "grad_norm": 43.918304443359375, - "learning_rate": 1.0129777777777778e-05, - "loss": 1.6151, - "step": 34010 - }, - { - "epoch": 1.08864, - "grad_norm": 46.054325103759766, - "learning_rate": 1.0126222222222224e-05, - "loss": 1.6226, - "step": 34020 - }, - { - "epoch": 1.08896, - "grad_norm": 45.775657653808594, - "learning_rate": 1.0122666666666667e-05, - "loss": 1.6358, - "step": 34030 - }, - { - "epoch": 1.08928, - "grad_norm": 43.96372985839844, - 
"learning_rate": 1.0119111111111113e-05, - "loss": 1.6379, - "step": 34040 - }, - { - "epoch": 1.0896, - "grad_norm": 48.16429138183594, - "learning_rate": 1.0115555555555556e-05, - "loss": 1.6075, - "step": 34050 - }, - { - "epoch": 1.08992, - "grad_norm": 47.51073455810547, - "learning_rate": 1.0112000000000002e-05, - "loss": 1.583, - "step": 34060 - }, - { - "epoch": 1.09024, - "grad_norm": 45.64347457885742, - "learning_rate": 1.0108444444444445e-05, - "loss": 1.6078, - "step": 34070 - }, - { - "epoch": 1.09056, - "grad_norm": 47.757633209228516, - "learning_rate": 1.0104888888888892e-05, - "loss": 1.6254, - "step": 34080 - }, - { - "epoch": 1.09088, - "grad_norm": 43.865596771240234, - "learning_rate": 1.0101333333333334e-05, - "loss": 1.6257, - "step": 34090 - }, - { - "epoch": 1.0912, - "grad_norm": 43.02202224731445, - "learning_rate": 1.0097777777777779e-05, - "loss": 1.6379, - "step": 34100 - }, - { - "epoch": 1.09152, - "grad_norm": 48.95008087158203, - "learning_rate": 1.0094222222222224e-05, - "loss": 1.6293, - "step": 34110 - }, - { - "epoch": 1.09184, - "grad_norm": 44.4239501953125, - "learning_rate": 1.0090666666666666e-05, - "loss": 1.6261, - "step": 34120 - }, - { - "epoch": 1.09216, - "grad_norm": 44.51701736450195, - "learning_rate": 1.0087111111111113e-05, - "loss": 1.6237, - "step": 34130 - }, - { - "epoch": 1.0924800000000001, - "grad_norm": 45.51099395751953, - "learning_rate": 1.0083555555555555e-05, - "loss": 1.598, - "step": 34140 - }, - { - "epoch": 1.0928, - "grad_norm": 44.413673400878906, - "learning_rate": 1.008e-05, - "loss": 1.6479, - "step": 34150 - }, - { - "epoch": 1.09312, - "grad_norm": 44.12715148925781, - "learning_rate": 1.0076444444444445e-05, - "loss": 1.6549, - "step": 34160 - }, - { - "epoch": 1.09344, - "grad_norm": 44.77954864501953, - "learning_rate": 1.007288888888889e-05, - "loss": 1.6036, - "step": 34170 - }, - { - "epoch": 1.09376, - "grad_norm": 43.165225982666016, - "learning_rate": 1.0069333333333334e-05, - "loss": 1.6159, - "step": 34180 - }, - { - "epoch": 1.09408, - "grad_norm": 43.7326545715332, - "learning_rate": 1.0065777777777778e-05, - "loss": 1.6138, - "step": 34190 - }, - { - "epoch": 1.0944, - "grad_norm": 45.99542999267578, - "learning_rate": 1.0062222222222223e-05, - "loss": 1.6264, - "step": 34200 - }, - { - "epoch": 1.09472, - "grad_norm": 45.08587646484375, - "learning_rate": 1.0058666666666667e-05, - "loss": 1.6064, - "step": 34210 - }, - { - "epoch": 1.09504, - "grad_norm": 44.10668182373047, - "learning_rate": 1.005511111111111e-05, - "loss": 1.6188, - "step": 34220 - }, - { - "epoch": 1.09536, - "grad_norm": 45.843345642089844, - "learning_rate": 1.0051555555555556e-05, - "loss": 1.6259, - "step": 34230 - }, - { - "epoch": 1.09568, - "grad_norm": 45.700645446777344, - "learning_rate": 1.0048e-05, - "loss": 1.628, - "step": 34240 - }, - { - "epoch": 1.096, - "grad_norm": 44.80293655395508, - "learning_rate": 1.0044444444444446e-05, - "loss": 1.5932, - "step": 34250 - }, - { - "epoch": 1.09632, - "grad_norm": 43.28338623046875, - "learning_rate": 1.0040888888888888e-05, - "loss": 1.6261, - "step": 34260 - }, - { - "epoch": 1.09664, - "grad_norm": 46.780269622802734, - "learning_rate": 1.0037333333333335e-05, - "loss": 1.6001, - "step": 34270 - }, - { - "epoch": 1.09696, - "grad_norm": 42.56535339355469, - "learning_rate": 1.0033777777777778e-05, - "loss": 1.6386, - "step": 34280 - }, - { - "epoch": 1.09728, - "grad_norm": 45.69676971435547, - "learning_rate": 1.0030222222222224e-05, - "loss": 1.6089, - "step": 34290 
- }, - { - "epoch": 1.0976, - "grad_norm": 42.413429260253906, - "learning_rate": 1.0026666666666667e-05, - "loss": 1.6273, - "step": 34300 - }, - { - "epoch": 1.09792, - "grad_norm": 44.377784729003906, - "learning_rate": 1.0023111111111113e-05, - "loss": 1.6301, - "step": 34310 - }, - { - "epoch": 1.09824, - "grad_norm": 44.19623565673828, - "learning_rate": 1.0019555555555556e-05, - "loss": 1.6346, - "step": 34320 - }, - { - "epoch": 1.09856, - "grad_norm": 44.605125427246094, - "learning_rate": 1.0016000000000002e-05, - "loss": 1.6098, - "step": 34330 - }, - { - "epoch": 1.09888, - "grad_norm": 45.71287536621094, - "learning_rate": 1.0012444444444445e-05, - "loss": 1.6236, - "step": 34340 - }, - { - "epoch": 1.0992, - "grad_norm": 44.4366455078125, - "learning_rate": 1.000888888888889e-05, - "loss": 1.6575, - "step": 34350 - }, - { - "epoch": 1.09952, - "grad_norm": 46.797176361083984, - "learning_rate": 1.0005333333333334e-05, - "loss": 1.6285, - "step": 34360 - }, - { - "epoch": 1.09984, - "grad_norm": 42.749549865722656, - "learning_rate": 1.0001777777777779e-05, - "loss": 1.5832, - "step": 34370 - }, - { - "epoch": 1.10016, - "grad_norm": 44.592552185058594, - "learning_rate": 9.998222222222223e-06, - "loss": 1.6181, - "step": 34380 - }, - { - "epoch": 1.10048, - "grad_norm": 44.82821273803711, - "learning_rate": 9.994666666666668e-06, - "loss": 1.6198, - "step": 34390 - }, - { - "epoch": 1.1008, - "grad_norm": 45.68002700805664, - "learning_rate": 9.991111111111112e-06, - "loss": 1.6202, - "step": 34400 - }, - { - "epoch": 1.10112, - "grad_norm": 45.76319122314453, - "learning_rate": 9.987555555555555e-06, - "loss": 1.6217, - "step": 34410 - }, - { - "epoch": 1.10144, - "grad_norm": 45.374473571777344, - "learning_rate": 9.984e-06, - "loss": 1.6254, - "step": 34420 - }, - { - "epoch": 1.10176, - "grad_norm": 46.05216979980469, - "learning_rate": 9.980444444444444e-06, - "loss": 1.6517, - "step": 34430 - }, - { - "epoch": 1.10208, - "grad_norm": 45.405696868896484, - "learning_rate": 9.976888888888889e-06, - "loss": 1.5984, - "step": 34440 - }, - { - "epoch": 1.1024, - "grad_norm": 44.274539947509766, - "learning_rate": 9.973333333333333e-06, - "loss": 1.6108, - "step": 34450 - }, - { - "epoch": 1.10272, - "grad_norm": 43.765960693359375, - "learning_rate": 9.969777777777778e-06, - "loss": 1.6175, - "step": 34460 - }, - { - "epoch": 1.10304, - "grad_norm": 44.908416748046875, - "learning_rate": 9.966222222222222e-06, - "loss": 1.6131, - "step": 34470 - }, - { - "epoch": 1.10336, - "grad_norm": 45.51877975463867, - "learning_rate": 9.962666666666667e-06, - "loss": 1.6011, - "step": 34480 - }, - { - "epoch": 1.10368, - "grad_norm": 43.720340728759766, - "learning_rate": 9.959111111111111e-06, - "loss": 1.6129, - "step": 34490 - }, - { - "epoch": 1.104, - "grad_norm": 44.68758773803711, - "learning_rate": 9.955555555555556e-06, - "loss": 1.6381, - "step": 34500 - }, - { - "epoch": 1.10432, - "grad_norm": 43.98762512207031, - "learning_rate": 9.952e-06, - "loss": 1.6444, - "step": 34510 - }, - { - "epoch": 1.10464, - "grad_norm": 44.053226470947266, - "learning_rate": 9.948444444444445e-06, - "loss": 1.6195, - "step": 34520 - }, - { - "epoch": 1.10496, - "grad_norm": 44.89874267578125, - "learning_rate": 9.94488888888889e-06, - "loss": 1.6158, - "step": 34530 - }, - { - "epoch": 1.10528, - "grad_norm": 45.770843505859375, - "learning_rate": 9.941333333333334e-06, - "loss": 1.6134, - "step": 34540 - }, - { - "epoch": 1.1056, - "grad_norm": 43.45571517944336, - "learning_rate": 
9.937777777777779e-06, - "loss": 1.6221, - "step": 34550 - }, - { - "epoch": 1.10592, - "grad_norm": 44.796852111816406, - "learning_rate": 9.934222222222223e-06, - "loss": 1.6123, - "step": 34560 - }, - { - "epoch": 1.1062400000000001, - "grad_norm": 44.354251861572266, - "learning_rate": 9.930666666666668e-06, - "loss": 1.5849, - "step": 34570 - }, - { - "epoch": 1.10656, - "grad_norm": 44.47141647338867, - "learning_rate": 9.927111111111112e-06, - "loss": 1.6325, - "step": 34580 - }, - { - "epoch": 1.10688, - "grad_norm": 44.321067810058594, - "learning_rate": 9.923555555555557e-06, - "loss": 1.6389, - "step": 34590 - }, - { - "epoch": 1.1072, - "grad_norm": 44.76230239868164, - "learning_rate": 9.920000000000002e-06, - "loss": 1.6257, - "step": 34600 - }, - { - "epoch": 1.10752, - "grad_norm": 44.10141372680664, - "learning_rate": 9.916444444444444e-06, - "loss": 1.6173, - "step": 34610 - }, - { - "epoch": 1.10784, - "grad_norm": 45.80775833129883, - "learning_rate": 9.912888888888889e-06, - "loss": 1.6211, - "step": 34620 - }, - { - "epoch": 1.10816, - "grad_norm": 46.45954895019531, - "learning_rate": 9.909333333333334e-06, - "loss": 1.6053, - "step": 34630 - }, - { - "epoch": 1.10848, - "grad_norm": 42.96110916137695, - "learning_rate": 9.905777777777778e-06, - "loss": 1.6146, - "step": 34640 - }, - { - "epoch": 1.1088, - "grad_norm": 45.49494934082031, - "learning_rate": 9.902222222222223e-06, - "loss": 1.5967, - "step": 34650 - }, - { - "epoch": 1.1091199999999999, - "grad_norm": 43.099300384521484, - "learning_rate": 9.898666666666667e-06, - "loss": 1.6203, - "step": 34660 - }, - { - "epoch": 1.10944, - "grad_norm": 42.93831253051758, - "learning_rate": 9.895111111111112e-06, - "loss": 1.6078, - "step": 34670 - }, - { - "epoch": 1.10976, - "grad_norm": 43.761878967285156, - "learning_rate": 9.891555555555556e-06, - "loss": 1.6117, - "step": 34680 - }, - { - "epoch": 1.11008, - "grad_norm": 44.20676040649414, - "learning_rate": 9.888000000000001e-06, - "loss": 1.6246, - "step": 34690 - }, - { - "epoch": 1.1104, - "grad_norm": 43.08485412597656, - "learning_rate": 9.884444444444445e-06, - "loss": 1.6179, - "step": 34700 - }, - { - "epoch": 1.11072, - "grad_norm": 45.42795181274414, - "learning_rate": 9.88088888888889e-06, - "loss": 1.6055, - "step": 34710 - }, - { - "epoch": 1.11104, - "grad_norm": 46.919559478759766, - "learning_rate": 9.877333333333335e-06, - "loss": 1.6061, - "step": 34720 - }, - { - "epoch": 1.11136, - "grad_norm": 46.053768157958984, - "learning_rate": 9.873777777777779e-06, - "loss": 1.5963, - "step": 34730 - }, - { - "epoch": 1.11168, - "grad_norm": 44.76418685913086, - "learning_rate": 9.870222222222224e-06, - "loss": 1.6167, - "step": 34740 - }, - { - "epoch": 1.112, - "grad_norm": 44.72624969482422, - "learning_rate": 9.866666666666668e-06, - "loss": 1.6254, - "step": 34750 - }, - { - "epoch": 1.11232, - "grad_norm": 43.534324645996094, - "learning_rate": 9.863111111111113e-06, - "loss": 1.6042, - "step": 34760 - }, - { - "epoch": 1.11264, - "grad_norm": 45.52174377441406, - "learning_rate": 9.859555555555557e-06, - "loss": 1.6191, - "step": 34770 - }, - { - "epoch": 1.11296, - "grad_norm": 44.6978645324707, - "learning_rate": 9.856000000000002e-06, - "loss": 1.6425, - "step": 34780 - }, - { - "epoch": 1.11328, - "grad_norm": 44.247283935546875, - "learning_rate": 9.852444444444446e-06, - "loss": 1.6097, - "step": 34790 - }, - { - "epoch": 1.1136, - "grad_norm": 44.1110725402832, - "learning_rate": 9.84888888888889e-06, - "loss": 1.6406, - "step": 34800 - 
}, - { - "epoch": 1.11392, - "grad_norm": 44.1603889465332, - "learning_rate": 9.845333333333334e-06, - "loss": 1.6166, - "step": 34810 - }, - { - "epoch": 1.11424, - "grad_norm": 46.36598205566406, - "learning_rate": 9.841777777777778e-06, - "loss": 1.6222, - "step": 34820 - }, - { - "epoch": 1.11456, - "grad_norm": 42.831356048583984, - "learning_rate": 9.838222222222223e-06, - "loss": 1.6166, - "step": 34830 - }, - { - "epoch": 1.11488, - "grad_norm": 44.336055755615234, - "learning_rate": 9.834666666666667e-06, - "loss": 1.6025, - "step": 34840 - }, - { - "epoch": 1.1152, - "grad_norm": 45.25812911987305, - "learning_rate": 9.831111111111112e-06, - "loss": 1.6351, - "step": 34850 - }, - { - "epoch": 1.11552, - "grad_norm": 44.683589935302734, - "learning_rate": 9.827555555555557e-06, - "loss": 1.6015, - "step": 34860 - }, - { - "epoch": 1.11584, - "grad_norm": 47.49604797363281, - "learning_rate": 9.824000000000001e-06, - "loss": 1.6258, - "step": 34870 - }, - { - "epoch": 1.11616, - "grad_norm": 43.392303466796875, - "learning_rate": 9.820444444444446e-06, - "loss": 1.6146, - "step": 34880 - }, - { - "epoch": 1.11648, - "grad_norm": 44.247188568115234, - "learning_rate": 9.81688888888889e-06, - "loss": 1.6411, - "step": 34890 - }, - { - "epoch": 1.1168, - "grad_norm": 46.70269012451172, - "learning_rate": 9.813333333333333e-06, - "loss": 1.6291, - "step": 34900 - }, - { - "epoch": 1.11712, - "grad_norm": 44.9300537109375, - "learning_rate": 9.809777777777778e-06, - "loss": 1.6169, - "step": 34910 - }, - { - "epoch": 1.11744, - "grad_norm": 44.363807678222656, - "learning_rate": 9.806222222222222e-06, - "loss": 1.6618, - "step": 34920 - }, - { - "epoch": 1.11776, - "grad_norm": 46.45115661621094, - "learning_rate": 9.802666666666667e-06, - "loss": 1.6276, - "step": 34930 - }, - { - "epoch": 1.11808, - "grad_norm": 44.15941619873047, - "learning_rate": 9.799111111111111e-06, - "loss": 1.5878, - "step": 34940 - }, - { - "epoch": 1.1184, - "grad_norm": 46.215606689453125, - "learning_rate": 9.795555555555556e-06, - "loss": 1.642, - "step": 34950 - }, - { - "epoch": 1.11872, - "grad_norm": 42.38996887207031, - "learning_rate": 9.792e-06, - "loss": 1.5918, - "step": 34960 - }, - { - "epoch": 1.11904, - "grad_norm": 45.36106491088867, - "learning_rate": 9.788444444444445e-06, - "loss": 1.5951, - "step": 34970 - }, - { - "epoch": 1.11936, - "grad_norm": 42.64955520629883, - "learning_rate": 9.78488888888889e-06, - "loss": 1.5968, - "step": 34980 - }, - { - "epoch": 1.11968, - "grad_norm": 43.310054779052734, - "learning_rate": 9.781333333333334e-06, - "loss": 1.6062, - "step": 34990 - }, - { - "epoch": 1.12, - "grad_norm": 45.69913101196289, - "learning_rate": 9.777777777777779e-06, - "loss": 1.6475, - "step": 35000 - }, - { - "epoch": 1.12032, - "grad_norm": 45.340972900390625, - "learning_rate": 9.774222222222223e-06, - "loss": 1.6389, - "step": 35010 - }, - { - "epoch": 1.12064, - "grad_norm": 43.69623947143555, - "learning_rate": 9.770666666666668e-06, - "loss": 1.6071, - "step": 35020 - }, - { - "epoch": 1.12096, - "grad_norm": 43.38682174682617, - "learning_rate": 9.767111111111112e-06, - "loss": 1.6332, - "step": 35030 - }, - { - "epoch": 1.12128, - "grad_norm": 44.497440338134766, - "learning_rate": 9.763555555555557e-06, - "loss": 1.6454, - "step": 35040 - }, - { - "epoch": 1.1216, - "grad_norm": 43.34943389892578, - "learning_rate": 9.760000000000001e-06, - "loss": 1.6254, - "step": 35050 - }, - { - "epoch": 1.12192, - "grad_norm": 45.34946060180664, - "learning_rate": 
9.756444444444444e-06, - "loss": 1.638, - "step": 35060 - }, - { - "epoch": 1.12224, - "grad_norm": 46.76422882080078, - "learning_rate": 9.752888888888889e-06, - "loss": 1.6248, - "step": 35070 - }, - { - "epoch": 1.12256, - "grad_norm": 43.90962219238281, - "learning_rate": 9.749333333333333e-06, - "loss": 1.6249, - "step": 35080 - }, - { - "epoch": 1.12288, - "grad_norm": 45.085994720458984, - "learning_rate": 9.745777777777778e-06, - "loss": 1.6255, - "step": 35090 - }, - { - "epoch": 1.1232, - "grad_norm": 46.66875457763672, - "learning_rate": 9.742222222222222e-06, - "loss": 1.6219, - "step": 35100 - }, - { - "epoch": 1.12352, - "grad_norm": 46.58740234375, - "learning_rate": 9.738666666666667e-06, - "loss": 1.6079, - "step": 35110 - }, - { - "epoch": 1.12384, - "grad_norm": 45.570945739746094, - "learning_rate": 9.735111111111112e-06, - "loss": 1.629, - "step": 35120 - }, - { - "epoch": 1.12416, - "grad_norm": 43.049095153808594, - "learning_rate": 9.731555555555556e-06, - "loss": 1.608, - "step": 35130 - }, - { - "epoch": 1.12448, - "grad_norm": 45.4088020324707, - "learning_rate": 9.728e-06, - "loss": 1.5962, - "step": 35140 - }, - { - "epoch": 1.1248, - "grad_norm": 45.11723709106445, - "learning_rate": 9.724444444444445e-06, - "loss": 1.6141, - "step": 35150 - }, - { - "epoch": 1.12512, - "grad_norm": 44.503482818603516, - "learning_rate": 9.72088888888889e-06, - "loss": 1.643, - "step": 35160 - }, - { - "epoch": 1.12544, - "grad_norm": 42.30671310424805, - "learning_rate": 9.717333333333334e-06, - "loss": 1.6232, - "step": 35170 - }, - { - "epoch": 1.12576, - "grad_norm": 44.18793869018555, - "learning_rate": 9.713777777777779e-06, - "loss": 1.6206, - "step": 35180 - }, - { - "epoch": 1.12608, - "grad_norm": 44.02192687988281, - "learning_rate": 9.710222222222223e-06, - "loss": 1.6226, - "step": 35190 - }, - { - "epoch": 1.1264, - "grad_norm": 42.9251823425293, - "learning_rate": 9.706666666666668e-06, - "loss": 1.6467, - "step": 35200 - }, - { - "epoch": 1.12672, - "grad_norm": 43.18367385864258, - "learning_rate": 9.703111111111113e-06, - "loss": 1.6253, - "step": 35210 - }, - { - "epoch": 1.12704, - "grad_norm": 44.549320220947266, - "learning_rate": 9.699555555555557e-06, - "loss": 1.6039, - "step": 35220 - }, - { - "epoch": 1.12736, - "grad_norm": 45.207427978515625, - "learning_rate": 9.696000000000002e-06, - "loss": 1.6157, - "step": 35230 - }, - { - "epoch": 1.12768, - "grad_norm": 44.419551849365234, - "learning_rate": 9.692444444444446e-06, - "loss": 1.6288, - "step": 35240 - }, - { - "epoch": 1.1280000000000001, - "grad_norm": 47.223838806152344, - "learning_rate": 9.688888888888889e-06, - "loss": 1.6373, - "step": 35250 - }, - { - "epoch": 1.12832, - "grad_norm": 44.445526123046875, - "learning_rate": 9.685333333333334e-06, - "loss": 1.6128, - "step": 35260 - }, - { - "epoch": 1.12864, - "grad_norm": 42.873783111572266, - "learning_rate": 9.681777777777778e-06, - "loss": 1.624, - "step": 35270 - }, - { - "epoch": 1.12896, - "grad_norm": 43.351097106933594, - "learning_rate": 9.678222222222223e-06, - "loss": 1.606, - "step": 35280 - }, - { - "epoch": 1.12928, - "grad_norm": 42.88664245605469, - "learning_rate": 9.674666666666667e-06, - "loss": 1.5858, - "step": 35290 - }, - { - "epoch": 1.1296, - "grad_norm": 46.03646469116211, - "learning_rate": 9.671111111111112e-06, - "loss": 1.6352, - "step": 35300 - }, - { - "epoch": 1.12992, - "grad_norm": 43.69074630737305, - "learning_rate": 9.667555555555556e-06, - "loss": 1.6128, - "step": 35310 - }, - { - "epoch": 1.13024, 
- "grad_norm": 44.54518127441406, - "learning_rate": 9.664000000000001e-06, - "loss": 1.5902, - "step": 35320 - }, - { - "epoch": 1.13056, - "grad_norm": 43.998966217041016, - "learning_rate": 9.660444444444446e-06, - "loss": 1.5957, - "step": 35330 - }, - { - "epoch": 1.1308799999999999, - "grad_norm": 44.25476837158203, - "learning_rate": 9.65688888888889e-06, - "loss": 1.6201, - "step": 35340 - }, - { - "epoch": 1.1312, - "grad_norm": 43.57674026489258, - "learning_rate": 9.653333333333335e-06, - "loss": 1.6227, - "step": 35350 - }, - { - "epoch": 1.13152, - "grad_norm": 42.928321838378906, - "learning_rate": 9.64977777777778e-06, - "loss": 1.5962, - "step": 35360 - }, - { - "epoch": 1.13184, - "grad_norm": 46.871498107910156, - "learning_rate": 9.646222222222224e-06, - "loss": 1.5984, - "step": 35370 - }, - { - "epoch": 1.13216, - "grad_norm": 44.98406219482422, - "learning_rate": 9.642666666666668e-06, - "loss": 1.561, - "step": 35380 - }, - { - "epoch": 1.13248, - "grad_norm": 43.02797317504883, - "learning_rate": 9.639111111111113e-06, - "loss": 1.6324, - "step": 35390 - }, - { - "epoch": 1.1328, - "grad_norm": 47.9857292175293, - "learning_rate": 9.635555555555557e-06, - "loss": 1.6074, - "step": 35400 - }, - { - "epoch": 1.13312, - "grad_norm": 45.18562316894531, - "learning_rate": 9.632e-06, - "loss": 1.6103, - "step": 35410 - }, - { - "epoch": 1.13344, - "grad_norm": 45.36076736450195, - "learning_rate": 9.628444444444445e-06, - "loss": 1.6141, - "step": 35420 - }, - { - "epoch": 1.13376, - "grad_norm": 44.99217987060547, - "learning_rate": 9.62488888888889e-06, - "loss": 1.65, - "step": 35430 - }, - { - "epoch": 1.13408, - "grad_norm": 43.21048355102539, - "learning_rate": 9.621333333333334e-06, - "loss": 1.6333, - "step": 35440 - }, - { - "epoch": 1.1344, - "grad_norm": 46.94039535522461, - "learning_rate": 9.617777777777778e-06, - "loss": 1.6215, - "step": 35450 - }, - { - "epoch": 1.13472, - "grad_norm": 45.81171798706055, - "learning_rate": 9.614222222222223e-06, - "loss": 1.6278, - "step": 35460 - }, - { - "epoch": 1.13504, - "grad_norm": 42.52925109863281, - "learning_rate": 9.610666666666668e-06, - "loss": 1.6303, - "step": 35470 - }, - { - "epoch": 1.13536, - "grad_norm": 46.01049041748047, - "learning_rate": 9.607111111111112e-06, - "loss": 1.6218, - "step": 35480 - }, - { - "epoch": 1.13568, - "grad_norm": 43.44357681274414, - "learning_rate": 9.603555555555557e-06, - "loss": 1.5914, - "step": 35490 - }, - { - "epoch": 1.1360000000000001, - "grad_norm": 44.107540130615234, - "learning_rate": 9.600000000000001e-06, - "loss": 1.6313, - "step": 35500 - }, - { - "epoch": 1.13632, - "grad_norm": 47.5319709777832, - "learning_rate": 9.596444444444444e-06, - "loss": 1.6151, - "step": 35510 - }, - { - "epoch": 1.13664, - "grad_norm": 46.17417526245117, - "learning_rate": 9.592888888888889e-06, - "loss": 1.6174, - "step": 35520 - }, - { - "epoch": 1.13696, - "grad_norm": 47.07992172241211, - "learning_rate": 9.589333333333333e-06, - "loss": 1.6122, - "step": 35530 - }, - { - "epoch": 1.13728, - "grad_norm": 44.27360534667969, - "learning_rate": 9.585777777777778e-06, - "loss": 1.6238, - "step": 35540 - }, - { - "epoch": 1.1376, - "grad_norm": 45.99604797363281, - "learning_rate": 9.582222222222222e-06, - "loss": 1.619, - "step": 35550 - }, - { - "epoch": 1.13792, - "grad_norm": 47.591575622558594, - "learning_rate": 9.578666666666667e-06, - "loss": 1.6273, - "step": 35560 - }, - { - "epoch": 1.13824, - "grad_norm": 44.06424331665039, - "learning_rate": 9.575111111111111e-06, - 
"loss": 1.6236, - "step": 35570 - }, - { - "epoch": 1.13856, - "grad_norm": 43.544307708740234, - "learning_rate": 9.571555555555556e-06, - "loss": 1.6153, - "step": 35580 - }, - { - "epoch": 1.13888, - "grad_norm": 43.172245025634766, - "learning_rate": 9.568e-06, - "loss": 1.6304, - "step": 35590 - }, - { - "epoch": 1.1392, - "grad_norm": 43.66238021850586, - "learning_rate": 9.564444444444445e-06, - "loss": 1.5803, - "step": 35600 - }, - { - "epoch": 1.13952, - "grad_norm": 44.61842346191406, - "learning_rate": 9.56088888888889e-06, - "loss": 1.6163, - "step": 35610 - }, - { - "epoch": 1.13984, - "grad_norm": 43.94649887084961, - "learning_rate": 9.557333333333334e-06, - "loss": 1.6256, - "step": 35620 - }, - { - "epoch": 1.14016, - "grad_norm": 44.97169494628906, - "learning_rate": 9.553777777777779e-06, - "loss": 1.6265, - "step": 35630 - }, - { - "epoch": 1.14048, - "grad_norm": 42.63601303100586, - "learning_rate": 9.550222222222223e-06, - "loss": 1.6296, - "step": 35640 - }, - { - "epoch": 1.1408, - "grad_norm": 43.537925720214844, - "learning_rate": 9.546666666666668e-06, - "loss": 1.6142, - "step": 35650 - }, - { - "epoch": 1.14112, - "grad_norm": 43.652381896972656, - "learning_rate": 9.543111111111112e-06, - "loss": 1.6481, - "step": 35660 - }, - { - "epoch": 1.14144, - "grad_norm": 43.40591812133789, - "learning_rate": 9.539555555555557e-06, - "loss": 1.6477, - "step": 35670 - }, - { - "epoch": 1.14176, - "grad_norm": 44.605796813964844, - "learning_rate": 9.536000000000002e-06, - "loss": 1.6217, - "step": 35680 - }, - { - "epoch": 1.14208, - "grad_norm": 44.9923095703125, - "learning_rate": 9.532444444444446e-06, - "loss": 1.6236, - "step": 35690 - }, - { - "epoch": 1.1424, - "grad_norm": 45.73491668701172, - "learning_rate": 9.528888888888889e-06, - "loss": 1.6207, - "step": 35700 - }, - { - "epoch": 1.14272, - "grad_norm": 43.9902458190918, - "learning_rate": 9.525333333333334e-06, - "loss": 1.6332, - "step": 35710 - }, - { - "epoch": 1.14304, - "grad_norm": 44.50312805175781, - "learning_rate": 9.521777777777778e-06, - "loss": 1.6095, - "step": 35720 - }, - { - "epoch": 1.14336, - "grad_norm": 42.85441970825195, - "learning_rate": 9.518222222222223e-06, - "loss": 1.6086, - "step": 35730 - }, - { - "epoch": 1.14368, - "grad_norm": 43.633811950683594, - "learning_rate": 9.514666666666667e-06, - "loss": 1.6195, - "step": 35740 - }, - { - "epoch": 1.144, - "grad_norm": 44.819847106933594, - "learning_rate": 9.511111111111112e-06, - "loss": 1.6025, - "step": 35750 - }, - { - "epoch": 1.14432, - "grad_norm": 42.97698211669922, - "learning_rate": 9.507555555555556e-06, - "loss": 1.6075, - "step": 35760 - }, - { - "epoch": 1.1446399999999999, - "grad_norm": 42.89186477661133, - "learning_rate": 9.504e-06, - "loss": 1.604, - "step": 35770 - }, - { - "epoch": 1.14496, - "grad_norm": 44.783382415771484, - "learning_rate": 9.500444444444445e-06, - "loss": 1.642, - "step": 35780 - }, - { - "epoch": 1.14528, - "grad_norm": 46.417877197265625, - "learning_rate": 9.49688888888889e-06, - "loss": 1.6321, - "step": 35790 - }, - { - "epoch": 1.1456, - "grad_norm": 45.94801712036133, - "learning_rate": 9.493333333333334e-06, - "loss": 1.6116, - "step": 35800 - }, - { - "epoch": 1.14592, - "grad_norm": 45.10016632080078, - "learning_rate": 9.489777777777779e-06, - "loss": 1.6527, - "step": 35810 - }, - { - "epoch": 1.14624, - "grad_norm": 43.96493911743164, - "learning_rate": 9.486222222222224e-06, - "loss": 1.6229, - "step": 35820 - }, - { - "epoch": 1.14656, - "grad_norm": 46.303653717041016, 
- "learning_rate": 9.482666666666668e-06, - "loss": 1.6128, - "step": 35830 - }, - { - "epoch": 1.14688, - "grad_norm": 43.05647277832031, - "learning_rate": 9.479111111111113e-06, - "loss": 1.6246, - "step": 35840 - }, - { - "epoch": 1.1472, - "grad_norm": 43.26482391357422, - "learning_rate": 9.475555555555557e-06, - "loss": 1.617, - "step": 35850 - }, - { - "epoch": 1.14752, - "grad_norm": 46.7188720703125, - "learning_rate": 9.472000000000002e-06, - "loss": 1.6311, - "step": 35860 - }, - { - "epoch": 1.14784, - "grad_norm": 44.45231246948242, - "learning_rate": 9.468444444444446e-06, - "loss": 1.6216, - "step": 35870 - }, - { - "epoch": 1.14816, - "grad_norm": 44.6671028137207, - "learning_rate": 9.464888888888891e-06, - "loss": 1.6041, - "step": 35880 - }, - { - "epoch": 1.14848, - "grad_norm": 44.429935455322266, - "learning_rate": 9.461333333333334e-06, - "loss": 1.6353, - "step": 35890 - }, - { - "epoch": 1.1488, - "grad_norm": 45.848201751708984, - "learning_rate": 9.457777777777778e-06, - "loss": 1.6308, - "step": 35900 - }, - { - "epoch": 1.14912, - "grad_norm": 48.60066604614258, - "learning_rate": 9.454222222222223e-06, - "loss": 1.6377, - "step": 35910 - }, - { - "epoch": 1.14944, - "grad_norm": 43.88365173339844, - "learning_rate": 9.450666666666667e-06, - "loss": 1.6467, - "step": 35920 - }, - { - "epoch": 1.1497600000000001, - "grad_norm": 45.288902282714844, - "learning_rate": 9.447111111111112e-06, - "loss": 1.6111, - "step": 35930 - }, - { - "epoch": 1.15008, - "grad_norm": 44.7678337097168, - "learning_rate": 9.443555555555557e-06, - "loss": 1.624, - "step": 35940 - }, - { - "epoch": 1.1504, - "grad_norm": 42.77717971801758, - "learning_rate": 9.440000000000001e-06, - "loss": 1.6127, - "step": 35950 - }, - { - "epoch": 1.15072, - "grad_norm": 44.70463943481445, - "learning_rate": 9.436444444444444e-06, - "loss": 1.6006, - "step": 35960 - }, - { - "epoch": 1.15104, - "grad_norm": 45.02092742919922, - "learning_rate": 9.432888888888889e-06, - "loss": 1.6308, - "step": 35970 - }, - { - "epoch": 1.15136, - "grad_norm": 43.64936065673828, - "learning_rate": 9.429333333333333e-06, - "loss": 1.6191, - "step": 35980 - }, - { - "epoch": 1.15168, - "grad_norm": 46.581573486328125, - "learning_rate": 9.425777777777778e-06, - "loss": 1.6276, - "step": 35990 - }, - { - "epoch": 1.152, - "grad_norm": 43.60871887207031, - "learning_rate": 9.422222222222222e-06, - "loss": 1.6308, - "step": 36000 - }, - { - "epoch": 1.15232, - "grad_norm": 46.670352935791016, - "learning_rate": 9.418666666666667e-06, - "loss": 1.6315, - "step": 36010 - }, - { - "epoch": 1.1526399999999999, - "grad_norm": 46.51318359375, - "learning_rate": 9.415111111111111e-06, - "loss": 1.6134, - "step": 36020 - }, - { - "epoch": 1.15296, - "grad_norm": 44.41347122192383, - "learning_rate": 9.411555555555556e-06, - "loss": 1.6124, - "step": 36030 - }, - { - "epoch": 1.15328, - "grad_norm": 45.49982833862305, - "learning_rate": 9.408e-06, - "loss": 1.6189, - "step": 36040 - }, - { - "epoch": 1.1536, - "grad_norm": 43.75909423828125, - "learning_rate": 9.404444444444445e-06, - "loss": 1.593, - "step": 36050 - }, - { - "epoch": 1.15392, - "grad_norm": 45.35482406616211, - "learning_rate": 9.40088888888889e-06, - "loss": 1.6289, - "step": 36060 - }, - { - "epoch": 1.15424, - "grad_norm": 44.05399703979492, - "learning_rate": 9.397333333333334e-06, - "loss": 1.6319, - "step": 36070 - }, - { - "epoch": 1.15456, - "grad_norm": 44.33708953857422, - "learning_rate": 9.393777777777779e-06, - "loss": 1.642, - "step": 36080 - }, 
- { - "epoch": 1.15488, - "grad_norm": 43.19767761230469, - "learning_rate": 9.390222222222223e-06, - "loss": 1.5925, - "step": 36090 - }, - { - "epoch": 1.1552, - "grad_norm": 55.544673919677734, - "learning_rate": 9.386666666666668e-06, - "loss": 1.5988, - "step": 36100 - }, - { - "epoch": 1.15552, - "grad_norm": 43.77720260620117, - "learning_rate": 9.383111111111112e-06, - "loss": 1.6369, - "step": 36110 - }, - { - "epoch": 1.15584, - "grad_norm": 44.2166748046875, - "learning_rate": 9.379555555555557e-06, - "loss": 1.6292, - "step": 36120 - }, - { - "epoch": 1.15616, - "grad_norm": 44.4605598449707, - "learning_rate": 9.376000000000001e-06, - "loss": 1.6638, - "step": 36130 - }, - { - "epoch": 1.15648, - "grad_norm": 43.97906494140625, - "learning_rate": 9.372444444444446e-06, - "loss": 1.6261, - "step": 36140 - }, - { - "epoch": 1.1568, - "grad_norm": 42.21242141723633, - "learning_rate": 9.368888888888889e-06, - "loss": 1.6303, - "step": 36150 - }, - { - "epoch": 1.15712, - "grad_norm": 45.19140625, - "learning_rate": 9.365333333333333e-06, - "loss": 1.6001, - "step": 36160 - }, - { - "epoch": 1.15744, - "grad_norm": 45.53489685058594, - "learning_rate": 9.361777777777778e-06, - "loss": 1.6404, - "step": 36170 - }, - { - "epoch": 1.1577600000000001, - "grad_norm": 48.85858917236328, - "learning_rate": 9.358222222222222e-06, - "loss": 1.6627, - "step": 36180 - }, - { - "epoch": 1.15808, - "grad_norm": 44.05198669433594, - "learning_rate": 9.354666666666667e-06, - "loss": 1.645, - "step": 36190 - }, - { - "epoch": 1.1584, - "grad_norm": 44.831424713134766, - "learning_rate": 9.351111111111112e-06, - "loss": 1.6328, - "step": 36200 - }, - { - "epoch": 1.15872, - "grad_norm": 43.89720153808594, - "learning_rate": 9.347555555555556e-06, - "loss": 1.6223, - "step": 36210 - }, - { - "epoch": 1.15904, - "grad_norm": 43.87096405029297, - "learning_rate": 9.344e-06, - "loss": 1.6305, - "step": 36220 - }, - { - "epoch": 1.15936, - "grad_norm": 45.65452194213867, - "learning_rate": 9.340444444444445e-06, - "loss": 1.6264, - "step": 36230 - }, - { - "epoch": 1.15968, - "grad_norm": 45.44213104248047, - "learning_rate": 9.33688888888889e-06, - "loss": 1.6148, - "step": 36240 - }, - { - "epoch": 1.16, - "grad_norm": 43.852787017822266, - "learning_rate": 9.333333333333334e-06, - "loss": 1.6007, - "step": 36250 - }, - { - "epoch": 1.16032, - "grad_norm": 42.73979568481445, - "learning_rate": 9.329777777777779e-06, - "loss": 1.6034, - "step": 36260 - }, - { - "epoch": 1.16064, - "grad_norm": 44.16573715209961, - "learning_rate": 9.326222222222223e-06, - "loss": 1.5928, - "step": 36270 - }, - { - "epoch": 1.16096, - "grad_norm": 44.12287139892578, - "learning_rate": 9.322666666666668e-06, - "loss": 1.6128, - "step": 36280 - }, - { - "epoch": 1.16128, - "grad_norm": 46.16853713989258, - "learning_rate": 9.319111111111113e-06, - "loss": 1.6232, - "step": 36290 - }, - { - "epoch": 1.1616, - "grad_norm": 44.73285675048828, - "learning_rate": 9.315555555555557e-06, - "loss": 1.6311, - "step": 36300 - }, - { - "epoch": 1.16192, - "grad_norm": 44.60889434814453, - "learning_rate": 9.312000000000002e-06, - "loss": 1.5997, - "step": 36310 - }, - { - "epoch": 1.16224, - "grad_norm": 42.61589050292969, - "learning_rate": 9.308444444444446e-06, - "loss": 1.5993, - "step": 36320 - }, - { - "epoch": 1.16256, - "grad_norm": 43.2819938659668, - "learning_rate": 9.30488888888889e-06, - "loss": 1.6166, - "step": 36330 - }, - { - "epoch": 1.16288, - "grad_norm": 43.691932678222656, - "learning_rate": 
9.301333333333334e-06, - "loss": 1.6158, - "step": 36340 - }, - { - "epoch": 1.1632, - "grad_norm": 45.57815170288086, - "learning_rate": 9.297777777777778e-06, - "loss": 1.6539, - "step": 36350 - }, - { - "epoch": 1.16352, - "grad_norm": 45.618343353271484, - "learning_rate": 9.294222222222223e-06, - "loss": 1.6236, - "step": 36360 - }, - { - "epoch": 1.16384, - "grad_norm": 43.39927291870117, - "learning_rate": 9.290666666666667e-06, - "loss": 1.5714, - "step": 36370 - }, - { - "epoch": 1.16416, - "grad_norm": 43.51185607910156, - "learning_rate": 9.287111111111112e-06, - "loss": 1.6148, - "step": 36380 - }, - { - "epoch": 1.16448, - "grad_norm": 44.672054290771484, - "learning_rate": 9.283555555555556e-06, - "loss": 1.6241, - "step": 36390 - }, - { - "epoch": 1.1648, - "grad_norm": 44.70090103149414, - "learning_rate": 9.280000000000001e-06, - "loss": 1.6138, - "step": 36400 - }, - { - "epoch": 1.16512, - "grad_norm": 46.62152099609375, - "learning_rate": 9.276444444444445e-06, - "loss": 1.6375, - "step": 36410 - }, - { - "epoch": 1.16544, - "grad_norm": 44.284019470214844, - "learning_rate": 9.27288888888889e-06, - "loss": 1.6125, - "step": 36420 - }, - { - "epoch": 1.16576, - "grad_norm": 43.17630386352539, - "learning_rate": 9.269333333333335e-06, - "loss": 1.6245, - "step": 36430 - }, - { - "epoch": 1.16608, - "grad_norm": 44.420249938964844, - "learning_rate": 9.265777777777777e-06, - "loss": 1.6003, - "step": 36440 - }, - { - "epoch": 1.1663999999999999, - "grad_norm": 46.07585525512695, - "learning_rate": 9.262222222222222e-06, - "loss": 1.6219, - "step": 36450 - }, - { - "epoch": 1.16672, - "grad_norm": 45.58720779418945, - "learning_rate": 9.258666666666667e-06, - "loss": 1.6262, - "step": 36460 - }, - { - "epoch": 1.16704, - "grad_norm": 45.04667282104492, - "learning_rate": 9.255111111111111e-06, - "loss": 1.6299, - "step": 36470 - }, - { - "epoch": 1.16736, - "grad_norm": 44.7858772277832, - "learning_rate": 9.251555555555556e-06, - "loss": 1.5922, - "step": 36480 - }, - { - "epoch": 1.16768, - "grad_norm": 45.5151252746582, - "learning_rate": 9.248e-06, - "loss": 1.6394, - "step": 36490 - }, - { - "epoch": 1.168, - "grad_norm": 44.810909271240234, - "learning_rate": 9.244444444444445e-06, - "loss": 1.6145, - "step": 36500 - }, - { - "epoch": 1.16832, - "grad_norm": 44.83076858520508, - "learning_rate": 9.24088888888889e-06, - "loss": 1.6148, - "step": 36510 - }, - { - "epoch": 1.16864, - "grad_norm": 45.17132568359375, - "learning_rate": 9.237333333333334e-06, - "loss": 1.6096, - "step": 36520 - }, - { - "epoch": 1.16896, - "grad_norm": 45.50776290893555, - "learning_rate": 9.233777777777778e-06, - "loss": 1.596, - "step": 36530 - }, - { - "epoch": 1.16928, - "grad_norm": 45.95411682128906, - "learning_rate": 9.230222222222223e-06, - "loss": 1.5717, - "step": 36540 - }, - { - "epoch": 1.1696, - "grad_norm": 44.267269134521484, - "learning_rate": 9.226666666666668e-06, - "loss": 1.6102, - "step": 36550 - }, - { - "epoch": 1.16992, - "grad_norm": 43.271270751953125, - "learning_rate": 9.223111111111112e-06, - "loss": 1.6105, - "step": 36560 - }, - { - "epoch": 1.17024, - "grad_norm": 45.06768798828125, - "learning_rate": 9.219555555555557e-06, - "loss": 1.5963, - "step": 36570 - }, - { - "epoch": 1.17056, - "grad_norm": 43.08013153076172, - "learning_rate": 9.216000000000001e-06, - "loss": 1.634, - "step": 36580 - }, - { - "epoch": 1.17088, - "grad_norm": 44.94239044189453, - "learning_rate": 9.212444444444446e-06, - "loss": 1.6408, - "step": 36590 - }, - { - "epoch": 1.1712, 
- "grad_norm": 42.683597564697266, - "learning_rate": 9.208888888888889e-06, - "loss": 1.6013, - "step": 36600 - }, - { - "epoch": 1.1715200000000001, - "grad_norm": 43.48340606689453, - "learning_rate": 9.205333333333333e-06, - "loss": 1.6343, - "step": 36610 - }, - { - "epoch": 1.17184, - "grad_norm": 44.20817565917969, - "learning_rate": 9.201777777777778e-06, - "loss": 1.6184, - "step": 36620 - }, - { - "epoch": 1.17216, - "grad_norm": 44.68756103515625, - "learning_rate": 9.198222222222222e-06, - "loss": 1.5928, - "step": 36630 - }, - { - "epoch": 1.17248, - "grad_norm": 45.76731491088867, - "learning_rate": 9.194666666666667e-06, - "loss": 1.6248, - "step": 36640 - }, - { - "epoch": 1.1728, - "grad_norm": 45.566612243652344, - "learning_rate": 9.191111111111111e-06, - "loss": 1.6358, - "step": 36650 - }, - { - "epoch": 1.17312, - "grad_norm": 44.99404525756836, - "learning_rate": 9.187555555555556e-06, - "loss": 1.594, - "step": 36660 - }, - { - "epoch": 1.17344, - "grad_norm": 45.59415817260742, - "learning_rate": 9.184e-06, - "loss": 1.6198, - "step": 36670 - }, - { - "epoch": 1.17376, - "grad_norm": 44.73423767089844, - "learning_rate": 9.180444444444445e-06, - "loss": 1.6289, - "step": 36680 - }, - { - "epoch": 1.17408, - "grad_norm": 42.23807907104492, - "learning_rate": 9.17688888888889e-06, - "loss": 1.6122, - "step": 36690 - }, - { - "epoch": 1.1743999999999999, - "grad_norm": 46.371463775634766, - "learning_rate": 9.173333333333334e-06, - "loss": 1.593, - "step": 36700 - }, - { - "epoch": 1.17472, - "grad_norm": 42.88974380493164, - "learning_rate": 9.169777777777779e-06, - "loss": 1.5967, - "step": 36710 - }, - { - "epoch": 1.17504, - "grad_norm": 45.92631149291992, - "learning_rate": 9.166222222222223e-06, - "loss": 1.5994, - "step": 36720 - }, - { - "epoch": 1.17536, - "grad_norm": 42.9425048828125, - "learning_rate": 9.162666666666668e-06, - "loss": 1.6348, - "step": 36730 - }, - { - "epoch": 1.17568, - "grad_norm": 44.71460723876953, - "learning_rate": 9.159111111111112e-06, - "loss": 1.6139, - "step": 36740 - }, - { - "epoch": 1.176, - "grad_norm": 45.90800476074219, - "learning_rate": 9.155555555555557e-06, - "loss": 1.6034, - "step": 36750 - }, - { - "epoch": 1.17632, - "grad_norm": 45.773597717285156, - "learning_rate": 9.152000000000001e-06, - "loss": 1.6491, - "step": 36760 - }, - { - "epoch": 1.17664, - "grad_norm": 45.82653045654297, - "learning_rate": 9.148444444444446e-06, - "loss": 1.6415, - "step": 36770 - }, - { - "epoch": 1.17696, - "grad_norm": 43.5515022277832, - "learning_rate": 9.14488888888889e-06, - "loss": 1.6281, - "step": 36780 - }, - { - "epoch": 1.17728, - "grad_norm": 43.16986846923828, - "learning_rate": 9.141333333333333e-06, - "loss": 1.6479, - "step": 36790 - }, - { - "epoch": 1.1776, - "grad_norm": 46.149314880371094, - "learning_rate": 9.137777777777778e-06, - "loss": 1.6163, - "step": 36800 - }, - { - "epoch": 1.17792, - "grad_norm": 42.756710052490234, - "learning_rate": 9.134222222222223e-06, - "loss": 1.6276, - "step": 36810 - }, - { - "epoch": 1.17824, - "grad_norm": 45.28952407836914, - "learning_rate": 9.130666666666667e-06, - "loss": 1.6396, - "step": 36820 - }, - { - "epoch": 1.17856, - "grad_norm": 43.20062255859375, - "learning_rate": 9.127111111111112e-06, - "loss": 1.614, - "step": 36830 - }, - { - "epoch": 1.17888, - "grad_norm": 45.23955154418945, - "learning_rate": 9.123555555555556e-06, - "loss": 1.6036, - "step": 36840 - }, - { - "epoch": 1.1792, - "grad_norm": 44.820648193359375, - "learning_rate": 9.12e-06, - "loss": 
1.6039, - "step": 36850 - }, - { - "epoch": 1.1795200000000001, - "grad_norm": 45.45117950439453, - "learning_rate": 9.116444444444445e-06, - "loss": 1.6328, - "step": 36860 - }, - { - "epoch": 1.17984, - "grad_norm": 44.188228607177734, - "learning_rate": 9.11288888888889e-06, - "loss": 1.611, - "step": 36870 - }, - { - "epoch": 1.1801599999999999, - "grad_norm": 43.97381591796875, - "learning_rate": 9.109333333333334e-06, - "loss": 1.6189, - "step": 36880 - }, - { - "epoch": 1.18048, - "grad_norm": 44.498863220214844, - "learning_rate": 9.105777777777779e-06, - "loss": 1.6056, - "step": 36890 - }, - { - "epoch": 1.1808, - "grad_norm": 45.77372360229492, - "learning_rate": 9.102222222222224e-06, - "loss": 1.6487, - "step": 36900 - }, - { - "epoch": 1.18112, - "grad_norm": 45.980873107910156, - "learning_rate": 9.098666666666668e-06, - "loss": 1.5998, - "step": 36910 - }, - { - "epoch": 1.18144, - "grad_norm": 46.08082962036133, - "learning_rate": 9.095111111111113e-06, - "loss": 1.6208, - "step": 36920 - }, - { - "epoch": 1.18176, - "grad_norm": 44.75496292114258, - "learning_rate": 9.091555555555557e-06, - "loss": 1.6204, - "step": 36930 - }, - { - "epoch": 1.18208, - "grad_norm": 43.503047943115234, - "learning_rate": 9.088000000000002e-06, - "loss": 1.5918, - "step": 36940 - }, - { - "epoch": 1.1824, - "grad_norm": 43.14474868774414, - "learning_rate": 9.084444444444446e-06, - "loss": 1.6343, - "step": 36950 - }, - { - "epoch": 1.18272, - "grad_norm": 44.471099853515625, - "learning_rate": 9.08088888888889e-06, - "loss": 1.6001, - "step": 36960 - }, - { - "epoch": 1.18304, - "grad_norm": 44.57168960571289, - "learning_rate": 9.077333333333334e-06, - "loss": 1.6016, - "step": 36970 - }, - { - "epoch": 1.18336, - "grad_norm": 45.980613708496094, - "learning_rate": 9.073777777777778e-06, - "loss": 1.6027, - "step": 36980 - }, - { - "epoch": 1.18368, - "grad_norm": 43.24493408203125, - "learning_rate": 9.070222222222223e-06, - "loss": 1.6249, - "step": 36990 - }, - { - "epoch": 1.184, - "grad_norm": 42.255821228027344, - "learning_rate": 9.066666666666667e-06, - "loss": 1.6003, - "step": 37000 - }, - { - "epoch": 1.18432, - "grad_norm": 44.33125305175781, - "learning_rate": 9.063111111111112e-06, - "loss": 1.6218, - "step": 37010 - }, - { - "epoch": 1.18464, - "grad_norm": 43.5249137878418, - "learning_rate": 9.059555555555556e-06, - "loss": 1.633, - "step": 37020 - }, - { - "epoch": 1.18496, - "grad_norm": 45.45284652709961, - "learning_rate": 9.056000000000001e-06, - "loss": 1.6085, - "step": 37030 - }, - { - "epoch": 1.1852800000000001, - "grad_norm": 43.63113784790039, - "learning_rate": 9.052444444444446e-06, - "loss": 1.5775, - "step": 37040 - }, - { - "epoch": 1.1856, - "grad_norm": 44.04764175415039, - "learning_rate": 9.048888888888888e-06, - "loss": 1.5973, - "step": 37050 - }, - { - "epoch": 1.18592, - "grad_norm": 47.08494567871094, - "learning_rate": 9.045333333333333e-06, - "loss": 1.6013, - "step": 37060 - }, - { - "epoch": 1.18624, - "grad_norm": 44.20665740966797, - "learning_rate": 9.041777777777778e-06, - "loss": 1.6115, - "step": 37070 - }, - { - "epoch": 1.18656, - "grad_norm": 43.61125564575195, - "learning_rate": 9.038222222222222e-06, - "loss": 1.632, - "step": 37080 - }, - { - "epoch": 1.18688, - "grad_norm": 43.4201774597168, - "learning_rate": 9.034666666666667e-06, - "loss": 1.6255, - "step": 37090 - }, - { - "epoch": 1.1872, - "grad_norm": 45.814422607421875, - "learning_rate": 9.031111111111111e-06, - "loss": 1.6289, - "step": 37100 - }, - { - "epoch": 
1.18752, - "grad_norm": 44.91145706176758, - "learning_rate": 9.027555555555556e-06, - "loss": 1.6335, - "step": 37110 - }, - { - "epoch": 1.18784, - "grad_norm": 45.26153564453125, - "learning_rate": 9.024e-06, - "loss": 1.5778, - "step": 37120 - }, - { - "epoch": 1.1881599999999999, - "grad_norm": 43.415077209472656, - "learning_rate": 9.020444444444445e-06, - "loss": 1.6352, - "step": 37130 - }, - { - "epoch": 1.18848, - "grad_norm": 46.07261276245117, - "learning_rate": 9.01688888888889e-06, - "loss": 1.6045, - "step": 37140 - }, - { - "epoch": 1.1888, - "grad_norm": 50.8298225402832, - "learning_rate": 9.013333333333334e-06, - "loss": 1.6127, - "step": 37150 - }, - { - "epoch": 1.18912, - "grad_norm": 45.64514923095703, - "learning_rate": 9.009777777777779e-06, - "loss": 1.6337, - "step": 37160 - }, - { - "epoch": 1.18944, - "grad_norm": 45.26530456542969, - "learning_rate": 9.006222222222223e-06, - "loss": 1.6126, - "step": 37170 - }, - { - "epoch": 1.18976, - "grad_norm": 44.7235107421875, - "learning_rate": 9.002666666666668e-06, - "loss": 1.6181, - "step": 37180 - }, - { - "epoch": 1.19008, - "grad_norm": 45.90180969238281, - "learning_rate": 8.999111111111112e-06, - "loss": 1.6258, - "step": 37190 - }, - { - "epoch": 1.1904, - "grad_norm": 42.16521072387695, - "learning_rate": 8.995555555555557e-06, - "loss": 1.6327, - "step": 37200 - }, - { - "epoch": 1.19072, - "grad_norm": 47.36225509643555, - "learning_rate": 8.992000000000001e-06, - "loss": 1.6321, - "step": 37210 - }, - { - "epoch": 1.19104, - "grad_norm": 45.1246337890625, - "learning_rate": 8.988444444444446e-06, - "loss": 1.6306, - "step": 37220 - }, - { - "epoch": 1.19136, - "grad_norm": 43.45327377319336, - "learning_rate": 8.98488888888889e-06, - "loss": 1.6261, - "step": 37230 - }, - { - "epoch": 1.19168, - "grad_norm": 47.724151611328125, - "learning_rate": 8.981333333333333e-06, - "loss": 1.6076, - "step": 37240 - }, - { - "epoch": 1.192, - "grad_norm": 43.875343322753906, - "learning_rate": 8.977777777777778e-06, - "loss": 1.6254, - "step": 37250 - }, - { - "epoch": 1.19232, - "grad_norm": 43.029109954833984, - "learning_rate": 8.974222222222222e-06, - "loss": 1.641, - "step": 37260 - }, - { - "epoch": 1.19264, - "grad_norm": 43.80043029785156, - "learning_rate": 8.970666666666667e-06, - "loss": 1.6183, - "step": 37270 - }, - { - "epoch": 1.19296, - "grad_norm": 45.554203033447266, - "learning_rate": 8.967111111111112e-06, - "loss": 1.6157, - "step": 37280 - }, - { - "epoch": 1.1932800000000001, - "grad_norm": 43.858116149902344, - "learning_rate": 8.963555555555556e-06, - "loss": 1.604, - "step": 37290 - }, - { - "epoch": 1.1936, - "grad_norm": 42.90398025512695, - "learning_rate": 8.96e-06, - "loss": 1.5865, - "step": 37300 - }, - { - "epoch": 1.19392, - "grad_norm": 45.20762634277344, - "learning_rate": 8.956444444444445e-06, - "loss": 1.6424, - "step": 37310 - }, - { - "epoch": 1.19424, - "grad_norm": 49.68057632446289, - "learning_rate": 8.95288888888889e-06, - "loss": 1.6262, - "step": 37320 - }, - { - "epoch": 1.19456, - "grad_norm": 45.9451904296875, - "learning_rate": 8.949333333333334e-06, - "loss": 1.6375, - "step": 37330 - }, - { - "epoch": 1.19488, - "grad_norm": 45.719268798828125, - "learning_rate": 8.945777777777779e-06, - "loss": 1.6278, - "step": 37340 - }, - { - "epoch": 1.1952, - "grad_norm": 48.056095123291016, - "learning_rate": 8.942222222222223e-06, - "loss": 1.6007, - "step": 37350 - }, - { - "epoch": 1.19552, - "grad_norm": 44.8497428894043, - "learning_rate": 8.938666666666668e-06, - 
"loss": 1.6129, - "step": 37360 - }, - { - "epoch": 1.19584, - "grad_norm": 44.91301727294922, - "learning_rate": 8.935111111111112e-06, - "loss": 1.5899, - "step": 37370 - }, - { - "epoch": 1.19616, - "grad_norm": 44.85026168823242, - "learning_rate": 8.931555555555557e-06, - "loss": 1.5988, - "step": 37380 - }, - { - "epoch": 1.19648, - "grad_norm": 44.962589263916016, - "learning_rate": 8.928000000000002e-06, - "loss": 1.6004, - "step": 37390 - }, - { - "epoch": 1.1968, - "grad_norm": 43.96834945678711, - "learning_rate": 8.924444444444446e-06, - "loss": 1.6294, - "step": 37400 - }, - { - "epoch": 1.19712, - "grad_norm": 45.65105056762695, - "learning_rate": 8.92088888888889e-06, - "loss": 1.6187, - "step": 37410 - }, - { - "epoch": 1.19744, - "grad_norm": 45.473331451416016, - "learning_rate": 8.917333333333335e-06, - "loss": 1.6166, - "step": 37420 - }, - { - "epoch": 1.19776, - "grad_norm": 46.43606185913086, - "learning_rate": 8.91377777777778e-06, - "loss": 1.6146, - "step": 37430 - }, - { - "epoch": 1.19808, - "grad_norm": 45.848690032958984, - "learning_rate": 8.910222222222223e-06, - "loss": 1.6137, - "step": 37440 - }, - { - "epoch": 1.1984, - "grad_norm": 44.7730827331543, - "learning_rate": 8.906666666666667e-06, - "loss": 1.6347, - "step": 37450 - }, - { - "epoch": 1.19872, - "grad_norm": 44.687313079833984, - "learning_rate": 8.903111111111112e-06, - "loss": 1.6101, - "step": 37460 - }, - { - "epoch": 1.19904, - "grad_norm": 43.56023025512695, - "learning_rate": 8.899555555555556e-06, - "loss": 1.6436, - "step": 37470 - }, - { - "epoch": 1.19936, - "grad_norm": 46.550960540771484, - "learning_rate": 8.896000000000001e-06, - "loss": 1.617, - "step": 37480 - }, - { - "epoch": 1.19968, - "grad_norm": 44.08442306518555, - "learning_rate": 8.892444444444445e-06, - "loss": 1.5949, - "step": 37490 - }, - { - "epoch": 1.2, - "grad_norm": 47.61814880371094, - "learning_rate": 8.888888888888888e-06, - "loss": 1.6127, - "step": 37500 - }, - { - "epoch": 1.20032, - "grad_norm": 45.213722229003906, - "learning_rate": 8.885333333333333e-06, - "loss": 1.6172, - "step": 37510 - }, - { - "epoch": 1.20064, - "grad_norm": 43.18437194824219, - "learning_rate": 8.881777777777777e-06, - "loss": 1.5815, - "step": 37520 - }, - { - "epoch": 1.20096, - "grad_norm": 44.670265197753906, - "learning_rate": 8.878222222222222e-06, - "loss": 1.6497, - "step": 37530 - }, - { - "epoch": 1.20128, - "grad_norm": 43.545494079589844, - "learning_rate": 8.874666666666667e-06, - "loss": 1.6373, - "step": 37540 - }, - { - "epoch": 1.2016, - "grad_norm": 46.32993698120117, - "learning_rate": 8.871111111111111e-06, - "loss": 1.6065, - "step": 37550 - }, - { - "epoch": 1.2019199999999999, - "grad_norm": 46.05718994140625, - "learning_rate": 8.867555555555556e-06, - "loss": 1.6116, - "step": 37560 - }, - { - "epoch": 1.20224, - "grad_norm": 43.07898712158203, - "learning_rate": 8.864e-06, - "loss": 1.6107, - "step": 37570 - }, - { - "epoch": 1.20256, - "grad_norm": 45.01841354370117, - "learning_rate": 8.860444444444445e-06, - "loss": 1.611, - "step": 37580 - }, - { - "epoch": 1.20288, - "grad_norm": 44.30756759643555, - "learning_rate": 8.85688888888889e-06, - "loss": 1.6408, - "step": 37590 - }, - { - "epoch": 1.2032, - "grad_norm": 44.6888427734375, - "learning_rate": 8.853333333333334e-06, - "loss": 1.5964, - "step": 37600 - }, - { - "epoch": 1.20352, - "grad_norm": 43.844486236572266, - "learning_rate": 8.849777777777778e-06, - "loss": 1.6205, - "step": 37610 - }, - { - "epoch": 1.20384, - "grad_norm": 
44.9466552734375, - "learning_rate": 8.846222222222223e-06, - "loss": 1.6416, - "step": 37620 - }, - { - "epoch": 1.20416, - "grad_norm": 44.492984771728516, - "learning_rate": 8.842666666666667e-06, - "loss": 1.6136, - "step": 37630 - }, - { - "epoch": 1.20448, - "grad_norm": 45.26192855834961, - "learning_rate": 8.839111111111112e-06, - "loss": 1.5964, - "step": 37640 - }, - { - "epoch": 1.2048, - "grad_norm": 41.83114242553711, - "learning_rate": 8.835555555555557e-06, - "loss": 1.6058, - "step": 37650 - }, - { - "epoch": 1.20512, - "grad_norm": 45.18032455444336, - "learning_rate": 8.832000000000001e-06, - "loss": 1.6301, - "step": 37660 - }, - { - "epoch": 1.20544, - "grad_norm": 43.96016311645508, - "learning_rate": 8.828444444444446e-06, - "loss": 1.5992, - "step": 37670 - }, - { - "epoch": 1.20576, - "grad_norm": 44.86780548095703, - "learning_rate": 8.82488888888889e-06, - "loss": 1.6098, - "step": 37680 - }, - { - "epoch": 1.20608, - "grad_norm": 45.063751220703125, - "learning_rate": 8.821333333333333e-06, - "loss": 1.6084, - "step": 37690 - }, - { - "epoch": 1.2064, - "grad_norm": 44.57073211669922, - "learning_rate": 8.817777777777778e-06, - "loss": 1.6194, - "step": 37700 - }, - { - "epoch": 1.20672, - "grad_norm": 45.337684631347656, - "learning_rate": 8.814222222222222e-06, - "loss": 1.617, - "step": 37710 - }, - { - "epoch": 1.2070400000000001, - "grad_norm": 43.16669845581055, - "learning_rate": 8.810666666666667e-06, - "loss": 1.649, - "step": 37720 - }, - { - "epoch": 1.20736, - "grad_norm": 43.83868408203125, - "learning_rate": 8.807111111111111e-06, - "loss": 1.627, - "step": 37730 - }, - { - "epoch": 1.20768, - "grad_norm": 46.1161003112793, - "learning_rate": 8.803555555555556e-06, - "loss": 1.6164, - "step": 37740 - }, - { - "epoch": 1.208, - "grad_norm": 43.969825744628906, - "learning_rate": 8.8e-06, - "loss": 1.5992, - "step": 37750 - }, - { - "epoch": 1.20832, - "grad_norm": 45.1153450012207, - "learning_rate": 8.796444444444445e-06, - "loss": 1.601, - "step": 37760 - }, - { - "epoch": 1.20864, - "grad_norm": 45.29838180541992, - "learning_rate": 8.79288888888889e-06, - "loss": 1.6346, - "step": 37770 - }, - { - "epoch": 1.20896, - "grad_norm": 43.7877082824707, - "learning_rate": 8.789333333333334e-06, - "loss": 1.59, - "step": 37780 - }, - { - "epoch": 1.20928, - "grad_norm": 44.93843460083008, - "learning_rate": 8.785777777777779e-06, - "loss": 1.5945, - "step": 37790 - }, - { - "epoch": 1.2096, - "grad_norm": 46.926719665527344, - "learning_rate": 8.782222222222223e-06, - "loss": 1.622, - "step": 37800 - }, - { - "epoch": 1.2099199999999999, - "grad_norm": 46.099117279052734, - "learning_rate": 8.778666666666668e-06, - "loss": 1.6455, - "step": 37810 - }, - { - "epoch": 1.21024, - "grad_norm": 46.05337142944336, - "learning_rate": 8.775111111111112e-06, - "loss": 1.5997, - "step": 37820 - }, - { - "epoch": 1.21056, - "grad_norm": 43.47398376464844, - "learning_rate": 8.771555555555557e-06, - "loss": 1.6133, - "step": 37830 - }, - { - "epoch": 1.21088, - "grad_norm": 46.43062973022461, - "learning_rate": 8.768000000000001e-06, - "loss": 1.6512, - "step": 37840 - }, - { - "epoch": 1.2112, - "grad_norm": 49.61890411376953, - "learning_rate": 8.764444444444446e-06, - "loss": 1.5892, - "step": 37850 - }, - { - "epoch": 1.21152, - "grad_norm": 43.67453384399414, - "learning_rate": 8.76088888888889e-06, - "loss": 1.5901, - "step": 37860 - }, - { - "epoch": 1.21184, - "grad_norm": 42.86765670776367, - "learning_rate": 8.757333333333335e-06, - "loss": 1.6123, - 
"step": 37870 - }, - { - "epoch": 1.21216, - "grad_norm": 44.75029754638672, - "learning_rate": 8.75377777777778e-06, - "loss": 1.5822, - "step": 37880 - }, - { - "epoch": 1.21248, - "grad_norm": 45.341773986816406, - "learning_rate": 8.750222222222223e-06, - "loss": 1.6031, - "step": 37890 - }, - { - "epoch": 1.2128, - "grad_norm": 42.46044921875, - "learning_rate": 8.746666666666667e-06, - "loss": 1.6322, - "step": 37900 - }, - { - "epoch": 1.21312, - "grad_norm": 46.101863861083984, - "learning_rate": 8.743111111111112e-06, - "loss": 1.6122, - "step": 37910 - }, - { - "epoch": 1.21344, - "grad_norm": 45.06989669799805, - "learning_rate": 8.739555555555556e-06, - "loss": 1.6133, - "step": 37920 - }, - { - "epoch": 1.21376, - "grad_norm": 47.32184600830078, - "learning_rate": 8.736e-06, - "loss": 1.6447, - "step": 37930 - }, - { - "epoch": 1.21408, - "grad_norm": 43.49120330810547, - "learning_rate": 8.732444444444445e-06, - "loss": 1.6104, - "step": 37940 - }, - { - "epoch": 1.2144, - "grad_norm": 41.85742950439453, - "learning_rate": 8.72888888888889e-06, - "loss": 1.6254, - "step": 37950 - }, - { - "epoch": 1.21472, - "grad_norm": 44.84233093261719, - "learning_rate": 8.725333333333334e-06, - "loss": 1.6282, - "step": 37960 - }, - { - "epoch": 1.2150400000000001, - "grad_norm": 45.8273811340332, - "learning_rate": 8.721777777777779e-06, - "loss": 1.6182, - "step": 37970 - }, - { - "epoch": 1.21536, - "grad_norm": 44.68730163574219, - "learning_rate": 8.718222222222223e-06, - "loss": 1.6062, - "step": 37980 - }, - { - "epoch": 1.21568, - "grad_norm": 47.33452224731445, - "learning_rate": 8.714666666666666e-06, - "loss": 1.6, - "step": 37990 - }, - { - "epoch": 1.216, - "grad_norm": 43.193973541259766, - "learning_rate": 8.711111111111111e-06, - "loss": 1.6034, - "step": 38000 - }, - { - "epoch": 1.21632, - "grad_norm": 45.360347747802734, - "learning_rate": 8.707555555555555e-06, - "loss": 1.6528, - "step": 38010 - }, - { - "epoch": 1.21664, - "grad_norm": 43.036781311035156, - "learning_rate": 8.704e-06, - "loss": 1.6224, - "step": 38020 - }, - { - "epoch": 1.21696, - "grad_norm": 45.99064636230469, - "learning_rate": 8.700444444444445e-06, - "loss": 1.6257, - "step": 38030 - }, - { - "epoch": 1.21728, - "grad_norm": 44.47370147705078, - "learning_rate": 8.696888888888889e-06, - "loss": 1.6098, - "step": 38040 - }, - { - "epoch": 1.2176, - "grad_norm": 44.06339645385742, - "learning_rate": 8.693333333333334e-06, - "loss": 1.6181, - "step": 38050 - }, - { - "epoch": 1.21792, - "grad_norm": 43.332637786865234, - "learning_rate": 8.689777777777778e-06, - "loss": 1.6168, - "step": 38060 - }, - { - "epoch": 1.21824, - "grad_norm": 44.66054153442383, - "learning_rate": 8.686222222222223e-06, - "loss": 1.6235, - "step": 38070 - }, - { - "epoch": 1.21856, - "grad_norm": 43.89647674560547, - "learning_rate": 8.682666666666667e-06, - "loss": 1.6104, - "step": 38080 - }, - { - "epoch": 1.21888, - "grad_norm": 45.6687126159668, - "learning_rate": 8.679111111111112e-06, - "loss": 1.6235, - "step": 38090 - }, - { - "epoch": 1.2192, - "grad_norm": 46.06743621826172, - "learning_rate": 8.675555555555556e-06, - "loss": 1.615, - "step": 38100 - }, - { - "epoch": 1.21952, - "grad_norm": 43.16071701049805, - "learning_rate": 8.672000000000001e-06, - "loss": 1.6079, - "step": 38110 - }, - { - "epoch": 1.21984, - "grad_norm": 43.79576873779297, - "learning_rate": 8.668444444444446e-06, - "loss": 1.6133, - "step": 38120 - }, - { - "epoch": 1.22016, - "grad_norm": 43.21852111816406, - "learning_rate": 
8.66488888888889e-06, - "loss": 1.6361, - "step": 38130 - }, - { - "epoch": 1.22048, - "grad_norm": 44.7907829284668, - "learning_rate": 8.661333333333335e-06, - "loss": 1.6229, - "step": 38140 - }, - { - "epoch": 1.2208, - "grad_norm": 45.25461196899414, - "learning_rate": 8.657777777777778e-06, - "loss": 1.6424, - "step": 38150 - }, - { - "epoch": 1.22112, - "grad_norm": 46.63352966308594, - "learning_rate": 8.654222222222222e-06, - "loss": 1.6004, - "step": 38160 - }, - { - "epoch": 1.22144, - "grad_norm": 44.5078125, - "learning_rate": 8.650666666666667e-06, - "loss": 1.6004, - "step": 38170 - }, - { - "epoch": 1.22176, - "grad_norm": 46.22889709472656, - "learning_rate": 8.647111111111111e-06, - "loss": 1.6127, - "step": 38180 - }, - { - "epoch": 1.22208, - "grad_norm": 44.953792572021484, - "learning_rate": 8.643555555555556e-06, - "loss": 1.5963, - "step": 38190 - }, - { - "epoch": 1.2224, - "grad_norm": 45.0565071105957, - "learning_rate": 8.64e-06, - "loss": 1.6602, - "step": 38200 - }, - { - "epoch": 1.22272, - "grad_norm": 44.015262603759766, - "learning_rate": 8.636444444444445e-06, - "loss": 1.5775, - "step": 38210 - }, - { - "epoch": 1.22304, - "grad_norm": 44.347557067871094, - "learning_rate": 8.63288888888889e-06, - "loss": 1.5998, - "step": 38220 - }, - { - "epoch": 1.22336, - "grad_norm": 43.69554901123047, - "learning_rate": 8.629333333333334e-06, - "loss": 1.6048, - "step": 38230 - }, - { - "epoch": 1.2236799999999999, - "grad_norm": 45.76776123046875, - "learning_rate": 8.625777777777779e-06, - "loss": 1.6488, - "step": 38240 - }, - { - "epoch": 1.224, - "grad_norm": 46.74126052856445, - "learning_rate": 8.622222222222223e-06, - "loss": 1.6095, - "step": 38250 - }, - { - "epoch": 1.22432, - "grad_norm": 45.625762939453125, - "learning_rate": 8.618666666666668e-06, - "loss": 1.645, - "step": 38260 - }, - { - "epoch": 1.22464, - "grad_norm": 46.620445251464844, - "learning_rate": 8.615111111111112e-06, - "loss": 1.6095, - "step": 38270 - }, - { - "epoch": 1.22496, - "grad_norm": 45.9921875, - "learning_rate": 8.611555555555557e-06, - "loss": 1.5838, - "step": 38280 - }, - { - "epoch": 1.22528, - "grad_norm": 46.95634078979492, - "learning_rate": 8.608000000000001e-06, - "loss": 1.6117, - "step": 38290 - }, - { - "epoch": 1.2256, - "grad_norm": 44.37270736694336, - "learning_rate": 8.604444444444446e-06, - "loss": 1.6037, - "step": 38300 - }, - { - "epoch": 1.22592, - "grad_norm": 44.26304626464844, - "learning_rate": 8.60088888888889e-06, - "loss": 1.6024, - "step": 38310 - }, - { - "epoch": 1.22624, - "grad_norm": 44.726661682128906, - "learning_rate": 8.597333333333335e-06, - "loss": 1.6381, - "step": 38320 - }, - { - "epoch": 1.22656, - "grad_norm": 45.11967849731445, - "learning_rate": 8.59377777777778e-06, - "loss": 1.602, - "step": 38330 - }, - { - "epoch": 1.22688, - "grad_norm": 43.5937385559082, - "learning_rate": 8.590222222222222e-06, - "loss": 1.6049, - "step": 38340 - }, - { - "epoch": 1.2272, - "grad_norm": 45.37797164916992, - "learning_rate": 8.586666666666667e-06, - "loss": 1.6027, - "step": 38350 - }, - { - "epoch": 1.22752, - "grad_norm": 45.6694221496582, - "learning_rate": 8.583111111111111e-06, - "loss": 1.6565, - "step": 38360 - }, - { - "epoch": 1.22784, - "grad_norm": 44.0611686706543, - "learning_rate": 8.579555555555556e-06, - "loss": 1.6051, - "step": 38370 - }, - { - "epoch": 1.22816, - "grad_norm": 46.050472259521484, - "learning_rate": 8.576e-06, - "loss": 1.6169, - "step": 38380 - }, - { - "epoch": 1.22848, - "grad_norm": 
44.67304229736328, - "learning_rate": 8.572444444444445e-06, - "loss": 1.5995, - "step": 38390 - }, - { - "epoch": 1.2288000000000001, - "grad_norm": 52.205501556396484, - "learning_rate": 8.56888888888889e-06, - "loss": 1.6195, - "step": 38400 - }, - { - "epoch": 1.22912, - "grad_norm": 44.350059509277344, - "learning_rate": 8.565333333333334e-06, - "loss": 1.628, - "step": 38410 - }, - { - "epoch": 1.22944, - "grad_norm": 44.488616943359375, - "learning_rate": 8.561777777777779e-06, - "loss": 1.6148, - "step": 38420 - }, - { - "epoch": 1.22976, - "grad_norm": 44.28691482543945, - "learning_rate": 8.558222222222223e-06, - "loss": 1.623, - "step": 38430 - }, - { - "epoch": 1.23008, - "grad_norm": 45.6378288269043, - "learning_rate": 8.554666666666668e-06, - "loss": 1.6289, - "step": 38440 - }, - { - "epoch": 1.2304, - "grad_norm": 43.49380874633789, - "learning_rate": 8.551111111111112e-06, - "loss": 1.6431, - "step": 38450 - }, - { - "epoch": 1.23072, - "grad_norm": 44.094791412353516, - "learning_rate": 8.547555555555557e-06, - "loss": 1.615, - "step": 38460 - }, - { - "epoch": 1.23104, - "grad_norm": 43.89348602294922, - "learning_rate": 8.544000000000002e-06, - "loss": 1.6217, - "step": 38470 - }, - { - "epoch": 1.23136, - "grad_norm": 44.966827392578125, - "learning_rate": 8.540444444444446e-06, - "loss": 1.5995, - "step": 38480 - }, - { - "epoch": 1.2316799999999999, - "grad_norm": 46.915889739990234, - "learning_rate": 8.53688888888889e-06, - "loss": 1.6289, - "step": 38490 - }, - { - "epoch": 1.232, - "grad_norm": 44.71723937988281, - "learning_rate": 8.533333333333335e-06, - "loss": 1.603, - "step": 38500 - }, - { - "epoch": 1.23232, - "grad_norm": 43.65388870239258, - "learning_rate": 8.529777777777778e-06, - "loss": 1.5869, - "step": 38510 - }, - { - "epoch": 1.23264, - "grad_norm": 45.82659149169922, - "learning_rate": 8.526222222222223e-06, - "loss": 1.6324, - "step": 38520 - }, - { - "epoch": 1.23296, - "grad_norm": 46.039920806884766, - "learning_rate": 8.522666666666667e-06, - "loss": 1.5892, - "step": 38530 - }, - { - "epoch": 1.23328, - "grad_norm": 46.55792236328125, - "learning_rate": 8.519111111111112e-06, - "loss": 1.6527, - "step": 38540 - }, - { - "epoch": 1.2336, - "grad_norm": 45.67931365966797, - "learning_rate": 8.515555555555556e-06, - "loss": 1.6159, - "step": 38550 - }, - { - "epoch": 1.23392, - "grad_norm": 44.48359298706055, - "learning_rate": 8.512e-06, - "loss": 1.6629, - "step": 38560 - }, - { - "epoch": 1.23424, - "grad_norm": 45.672264099121094, - "learning_rate": 8.508444444444445e-06, - "loss": 1.6139, - "step": 38570 - }, - { - "epoch": 1.23456, - "grad_norm": 46.08909606933594, - "learning_rate": 8.50488888888889e-06, - "loss": 1.6346, - "step": 38580 - }, - { - "epoch": 1.23488, - "grad_norm": 43.823631286621094, - "learning_rate": 8.501333333333334e-06, - "loss": 1.6083, - "step": 38590 - }, - { - "epoch": 1.2352, - "grad_norm": 43.065364837646484, - "learning_rate": 8.497777777777777e-06, - "loss": 1.6003, - "step": 38600 - }, - { - "epoch": 1.23552, - "grad_norm": 46.16888427734375, - "learning_rate": 8.494222222222222e-06, - "loss": 1.6133, - "step": 38610 - }, - { - "epoch": 1.23584, - "grad_norm": 42.988792419433594, - "learning_rate": 8.490666666666666e-06, - "loss": 1.6111, - "step": 38620 - }, - { - "epoch": 1.23616, - "grad_norm": 44.04892349243164, - "learning_rate": 8.487111111111111e-06, - "loss": 1.5899, - "step": 38630 - }, - { - "epoch": 1.23648, - "grad_norm": 44.41130065917969, - "learning_rate": 8.483555555555556e-06, - "loss": 
1.6107, - "step": 38640 - }, - { - "epoch": 1.2368000000000001, - "grad_norm": 45.48244857788086, - "learning_rate": 8.48e-06, - "loss": 1.6073, - "step": 38650 - }, - { - "epoch": 1.23712, - "grad_norm": 45.22336959838867, - "learning_rate": 8.476444444444445e-06, - "loss": 1.6161, - "step": 38660 - }, - { - "epoch": 1.23744, - "grad_norm": 42.87507247924805, - "learning_rate": 8.47288888888889e-06, - "loss": 1.6071, - "step": 38670 - }, - { - "epoch": 1.23776, - "grad_norm": 45.26953125, - "learning_rate": 8.469333333333334e-06, - "loss": 1.6351, - "step": 38680 - }, - { - "epoch": 1.23808, - "grad_norm": 45.478519439697266, - "learning_rate": 8.465777777777778e-06, - "loss": 1.6162, - "step": 38690 - }, - { - "epoch": 1.2384, - "grad_norm": 41.99089813232422, - "learning_rate": 8.462222222222223e-06, - "loss": 1.6087, - "step": 38700 - }, - { - "epoch": 1.23872, - "grad_norm": 45.846282958984375, - "learning_rate": 8.458666666666667e-06, - "loss": 1.6067, - "step": 38710 - }, - { - "epoch": 1.23904, - "grad_norm": 43.07404327392578, - "learning_rate": 8.455111111111112e-06, - "loss": 1.6027, - "step": 38720 - }, - { - "epoch": 1.23936, - "grad_norm": 44.055213928222656, - "learning_rate": 8.451555555555557e-06, - "loss": 1.6201, - "step": 38730 - }, - { - "epoch": 1.23968, - "grad_norm": 43.944400787353516, - "learning_rate": 8.448000000000001e-06, - "loss": 1.6016, - "step": 38740 - }, - { - "epoch": 1.24, - "grad_norm": 44.895599365234375, - "learning_rate": 8.444444444444446e-06, - "loss": 1.6301, - "step": 38750 - }, - { - "epoch": 1.24032, - "grad_norm": 46.72675323486328, - "learning_rate": 8.44088888888889e-06, - "loss": 1.6448, - "step": 38760 - }, - { - "epoch": 1.24064, - "grad_norm": 46.3919792175293, - "learning_rate": 8.437333333333335e-06, - "loss": 1.6113, - "step": 38770 - }, - { - "epoch": 1.24096, - "grad_norm": 46.99666976928711, - "learning_rate": 8.43377777777778e-06, - "loss": 1.6063, - "step": 38780 - }, - { - "epoch": 1.24128, - "grad_norm": 44.31560516357422, - "learning_rate": 8.430222222222222e-06, - "loss": 1.6138, - "step": 38790 - }, - { - "epoch": 1.2416, - "grad_norm": 44.34001922607422, - "learning_rate": 8.426666666666667e-06, - "loss": 1.622, - "step": 38800 - }, - { - "epoch": 1.24192, - "grad_norm": 45.2856559753418, - "learning_rate": 8.423111111111111e-06, - "loss": 1.6052, - "step": 38810 - }, - { - "epoch": 1.24224, - "grad_norm": 45.01423263549805, - "learning_rate": 8.419555555555556e-06, - "loss": 1.6357, - "step": 38820 - }, - { - "epoch": 1.24256, - "grad_norm": 43.551082611083984, - "learning_rate": 8.416e-06, - "loss": 1.5987, - "step": 38830 - }, - { - "epoch": 1.24288, - "grad_norm": 42.74105453491211, - "learning_rate": 8.412444444444445e-06, - "loss": 1.5585, - "step": 38840 - }, - { - "epoch": 1.2432, - "grad_norm": 44.34646224975586, - "learning_rate": 8.40888888888889e-06, - "loss": 1.6188, - "step": 38850 - }, - { - "epoch": 1.24352, - "grad_norm": 44.549407958984375, - "learning_rate": 8.405333333333334e-06, - "loss": 1.608, - "step": 38860 - }, - { - "epoch": 1.24384, - "grad_norm": 48.80892562866211, - "learning_rate": 8.401777777777779e-06, - "loss": 1.6355, - "step": 38870 - }, - { - "epoch": 1.24416, - "grad_norm": 45.6882209777832, - "learning_rate": 8.398222222222223e-06, - "loss": 1.6417, - "step": 38880 - }, - { - "epoch": 1.24448, - "grad_norm": 43.90581512451172, - "learning_rate": 8.394666666666668e-06, - "loss": 1.6485, - "step": 38890 - }, - { - "epoch": 1.2448, - "grad_norm": 44.31962966918945, - "learning_rate": 
8.391111111111112e-06, - "loss": 1.638, - "step": 38900 - }, - { - "epoch": 1.24512, - "grad_norm": 45.7739372253418, - "learning_rate": 8.387555555555557e-06, - "loss": 1.6122, - "step": 38910 - }, - { - "epoch": 1.2454399999999999, - "grad_norm": 45.06602096557617, - "learning_rate": 8.384000000000001e-06, - "loss": 1.6004, - "step": 38920 - }, - { - "epoch": 1.24576, - "grad_norm": 47.67781066894531, - "learning_rate": 8.380444444444446e-06, - "loss": 1.6465, - "step": 38930 - }, - { - "epoch": 1.24608, - "grad_norm": 45.568111419677734, - "learning_rate": 8.37688888888889e-06, - "loss": 1.5947, - "step": 38940 - }, - { - "epoch": 1.2464, - "grad_norm": 45.62173080444336, - "learning_rate": 8.373333333333335e-06, - "loss": 1.6228, - "step": 38950 - }, - { - "epoch": 1.24672, - "grad_norm": 43.738197326660156, - "learning_rate": 8.36977777777778e-06, - "loss": 1.6312, - "step": 38960 - }, - { - "epoch": 1.24704, - "grad_norm": 43.58649444580078, - "learning_rate": 8.366222222222224e-06, - "loss": 1.6031, - "step": 38970 - }, - { - "epoch": 1.24736, - "grad_norm": 43.96977615356445, - "learning_rate": 8.362666666666667e-06, - "loss": 1.6238, - "step": 38980 - }, - { - "epoch": 1.24768, - "grad_norm": 42.86772155761719, - "learning_rate": 8.359111111111112e-06, - "loss": 1.6035, - "step": 38990 - }, - { - "epoch": 1.248, - "grad_norm": 45.149566650390625, - "learning_rate": 8.355555555555556e-06, - "loss": 1.6048, - "step": 39000 - }, - { - "epoch": 1.24832, - "grad_norm": 50.66899871826172, - "learning_rate": 8.352e-06, - "loss": 1.6034, - "step": 39010 - }, - { - "epoch": 1.24864, - "grad_norm": 44.265892028808594, - "learning_rate": 8.348444444444445e-06, - "loss": 1.6194, - "step": 39020 - }, - { - "epoch": 1.24896, - "grad_norm": 43.48944854736328, - "learning_rate": 8.34488888888889e-06, - "loss": 1.6136, - "step": 39030 - }, - { - "epoch": 1.24928, - "grad_norm": 45.849212646484375, - "learning_rate": 8.341333333333334e-06, - "loss": 1.6193, - "step": 39040 - }, - { - "epoch": 1.2496, - "grad_norm": 45.44252395629883, - "learning_rate": 8.337777777777777e-06, - "loss": 1.6313, - "step": 39050 - }, - { - "epoch": 1.24992, - "grad_norm": 43.57227325439453, - "learning_rate": 8.334222222222222e-06, - "loss": 1.6059, - "step": 39060 - }, - { - "epoch": 1.25024, - "grad_norm": 46.95555114746094, - "learning_rate": 8.330666666666666e-06, - "loss": 1.6226, - "step": 39070 - }, - { - "epoch": 1.2505600000000001, - "grad_norm": 44.509010314941406, - "learning_rate": 8.327111111111111e-06, - "loss": 1.5949, - "step": 39080 - }, - { - "epoch": 1.25088, - "grad_norm": 46.533512115478516, - "learning_rate": 8.323555555555555e-06, - "loss": 1.6491, - "step": 39090 - }, - { - "epoch": 1.2511999999999999, - "grad_norm": 45.26840591430664, - "learning_rate": 8.32e-06, - "loss": 1.609, - "step": 39100 - }, - { - "epoch": 1.25152, - "grad_norm": 43.79453659057617, - "learning_rate": 8.316444444444445e-06, - "loss": 1.6116, - "step": 39110 - }, - { - "epoch": 1.25184, - "grad_norm": 45.80735778808594, - "learning_rate": 8.312888888888889e-06, - "loss": 1.5951, - "step": 39120 - }, - { - "epoch": 1.25216, - "grad_norm": 44.02614212036133, - "learning_rate": 8.309333333333334e-06, - "loss": 1.6093, - "step": 39130 - }, - { - "epoch": 1.25248, - "grad_norm": 44.34676742553711, - "learning_rate": 8.305777777777778e-06, - "loss": 1.5938, - "step": 39140 - }, - { - "epoch": 1.2528000000000001, - "grad_norm": 44.790584564208984, - "learning_rate": 8.302222222222223e-06, - "loss": 1.5792, - "step": 39150 - }, 
- { - "epoch": 1.25312, - "grad_norm": 44.13827133178711, - "learning_rate": 8.298666666666667e-06, - "loss": 1.606, - "step": 39160 - }, - { - "epoch": 1.2534399999999999, - "grad_norm": 44.26396179199219, - "learning_rate": 8.295111111111112e-06, - "loss": 1.6278, - "step": 39170 - }, - { - "epoch": 1.25376, - "grad_norm": 44.31110763549805, - "learning_rate": 8.291555555555556e-06, - "loss": 1.6014, - "step": 39180 - }, - { - "epoch": 1.25408, - "grad_norm": 46.95048522949219, - "learning_rate": 8.288000000000001e-06, - "loss": 1.6161, - "step": 39190 - }, - { - "epoch": 1.2544, - "grad_norm": 44.23671340942383, - "learning_rate": 8.284444444444446e-06, - "loss": 1.5999, - "step": 39200 - }, - { - "epoch": 1.25472, - "grad_norm": 44.4106330871582, - "learning_rate": 8.28088888888889e-06, - "loss": 1.5989, - "step": 39210 - }, - { - "epoch": 1.25504, - "grad_norm": 46.071533203125, - "learning_rate": 8.277333333333335e-06, - "loss": 1.6218, - "step": 39220 - }, - { - "epoch": 1.25536, - "grad_norm": 45.08268737792969, - "learning_rate": 8.27377777777778e-06, - "loss": 1.6325, - "step": 39230 - }, - { - "epoch": 1.25568, - "grad_norm": 44.903846740722656, - "learning_rate": 8.270222222222222e-06, - "loss": 1.5971, - "step": 39240 - }, - { - "epoch": 1.256, - "grad_norm": 43.16526412963867, - "learning_rate": 8.266666666666667e-06, - "loss": 1.6335, - "step": 39250 - }, - { - "epoch": 1.25632, - "grad_norm": 45.51958084106445, - "learning_rate": 8.263111111111111e-06, - "loss": 1.6138, - "step": 39260 - }, - { - "epoch": 1.25664, - "grad_norm": 44.03669738769531, - "learning_rate": 8.259555555555556e-06, - "loss": 1.5963, - "step": 39270 - }, - { - "epoch": 1.25696, - "grad_norm": 43.991878509521484, - "learning_rate": 8.256e-06, - "loss": 1.5921, - "step": 39280 - }, - { - "epoch": 1.25728, - "grad_norm": 45.92048263549805, - "learning_rate": 8.252444444444445e-06, - "loss": 1.6665, - "step": 39290 - }, - { - "epoch": 1.2576, - "grad_norm": 44.205162048339844, - "learning_rate": 8.24888888888889e-06, - "loss": 1.5928, - "step": 39300 - }, - { - "epoch": 1.25792, - "grad_norm": 45.7902946472168, - "learning_rate": 8.245333333333334e-06, - "loss": 1.5982, - "step": 39310 - }, - { - "epoch": 1.25824, - "grad_norm": 45.62728500366211, - "learning_rate": 8.241777777777778e-06, - "loss": 1.6445, - "step": 39320 - }, - { - "epoch": 1.2585600000000001, - "grad_norm": 46.222023010253906, - "learning_rate": 8.238222222222223e-06, - "loss": 1.5962, - "step": 39330 - }, - { - "epoch": 1.25888, - "grad_norm": 45.977638244628906, - "learning_rate": 8.234666666666668e-06, - "loss": 1.612, - "step": 39340 - }, - { - "epoch": 1.2591999999999999, - "grad_norm": 46.27615737915039, - "learning_rate": 8.231111111111112e-06, - "loss": 1.6264, - "step": 39350 - }, - { - "epoch": 1.25952, - "grad_norm": 45.03913497924805, - "learning_rate": 8.227555555555557e-06, - "loss": 1.6275, - "step": 39360 - }, - { - "epoch": 1.25984, - "grad_norm": 44.86140060424805, - "learning_rate": 8.224000000000001e-06, - "loss": 1.6316, - "step": 39370 - }, - { - "epoch": 1.26016, - "grad_norm": 44.86174392700195, - "learning_rate": 8.220444444444446e-06, - "loss": 1.6095, - "step": 39380 - }, - { - "epoch": 1.26048, - "grad_norm": 43.89189910888672, - "learning_rate": 8.21688888888889e-06, - "loss": 1.6291, - "step": 39390 - }, - { - "epoch": 1.2608, - "grad_norm": 45.40138626098633, - "learning_rate": 8.213333333333335e-06, - "loss": 1.6167, - "step": 39400 - }, - { - "epoch": 1.26112, - "grad_norm": 43.08382797241211, - 
"learning_rate": 8.20977777777778e-06, - "loss": 1.6063, - "step": 39410 - }, - { - "epoch": 1.26144, - "grad_norm": 45.12299728393555, - "learning_rate": 8.206222222222224e-06, - "loss": 1.5989, - "step": 39420 - }, - { - "epoch": 1.26176, - "grad_norm": 42.76207733154297, - "learning_rate": 8.202666666666667e-06, - "loss": 1.5841, - "step": 39430 - }, - { - "epoch": 1.26208, - "grad_norm": 43.17430114746094, - "learning_rate": 8.199111111111111e-06, - "loss": 1.5885, - "step": 39440 - }, - { - "epoch": 1.2624, - "grad_norm": 45.012516021728516, - "learning_rate": 8.195555555555556e-06, - "loss": 1.5991, - "step": 39450 - }, - { - "epoch": 1.26272, - "grad_norm": 44.140464782714844, - "learning_rate": 8.192e-06, - "loss": 1.5906, - "step": 39460 - }, - { - "epoch": 1.26304, - "grad_norm": 44.850975036621094, - "learning_rate": 8.188444444444445e-06, - "loss": 1.609, - "step": 39470 - }, - { - "epoch": 1.26336, - "grad_norm": 46.2093620300293, - "learning_rate": 8.18488888888889e-06, - "loss": 1.6166, - "step": 39480 - }, - { - "epoch": 1.26368, - "grad_norm": 44.715633392333984, - "learning_rate": 8.181333333333334e-06, - "loss": 1.5741, - "step": 39490 - }, - { - "epoch": 1.264, - "grad_norm": 43.745506286621094, - "learning_rate": 8.177777777777779e-06, - "loss": 1.607, - "step": 39500 - }, - { - "epoch": 1.26432, - "grad_norm": 44.87568283081055, - "learning_rate": 8.174222222222223e-06, - "loss": 1.623, - "step": 39510 - }, - { - "epoch": 1.26464, - "grad_norm": 44.68089294433594, - "learning_rate": 8.170666666666668e-06, - "loss": 1.5934, - "step": 39520 - }, - { - "epoch": 1.2649599999999999, - "grad_norm": 42.680606842041016, - "learning_rate": 8.167111111111112e-06, - "loss": 1.602, - "step": 39530 - }, - { - "epoch": 1.26528, - "grad_norm": 48.34457015991211, - "learning_rate": 8.163555555555555e-06, - "loss": 1.6289, - "step": 39540 - }, - { - "epoch": 1.2656, - "grad_norm": 43.446624755859375, - "learning_rate": 8.16e-06, - "loss": 1.6195, - "step": 39550 - }, - { - "epoch": 1.26592, - "grad_norm": 46.034645080566406, - "learning_rate": 8.156444444444444e-06, - "loss": 1.6067, - "step": 39560 - }, - { - "epoch": 1.26624, - "grad_norm": 41.680397033691406, - "learning_rate": 8.152888888888889e-06, - "loss": 1.6123, - "step": 39570 - }, - { - "epoch": 1.2665600000000001, - "grad_norm": 46.30161666870117, - "learning_rate": 8.149333333333333e-06, - "loss": 1.6132, - "step": 39580 - }, - { - "epoch": 1.26688, - "grad_norm": 44.9428596496582, - "learning_rate": 8.145777777777778e-06, - "loss": 1.6079, - "step": 39590 - }, - { - "epoch": 1.2671999999999999, - "grad_norm": 45.387638092041016, - "learning_rate": 8.142222222222223e-06, - "loss": 1.5962, - "step": 39600 - }, - { - "epoch": 1.26752, - "grad_norm": 44.43503189086914, - "learning_rate": 8.138666666666667e-06, - "loss": 1.6002, - "step": 39610 - }, - { - "epoch": 1.26784, - "grad_norm": 46.8868522644043, - "learning_rate": 8.135111111111112e-06, - "loss": 1.6457, - "step": 39620 - }, - { - "epoch": 1.26816, - "grad_norm": 44.164154052734375, - "learning_rate": 8.131555555555556e-06, - "loss": 1.6158, - "step": 39630 - }, - { - "epoch": 1.26848, - "grad_norm": 45.72709274291992, - "learning_rate": 8.128e-06, - "loss": 1.6293, - "step": 39640 - }, - { - "epoch": 1.2688, - "grad_norm": 44.85696029663086, - "learning_rate": 8.124444444444445e-06, - "loss": 1.6238, - "step": 39650 - }, - { - "epoch": 1.26912, - "grad_norm": 43.19333267211914, - "learning_rate": 8.12088888888889e-06, - "loss": 1.5928, - "step": 39660 - }, - { - 
"epoch": 1.26944, - "grad_norm": 46.190467834472656, - "learning_rate": 8.117333333333334e-06, - "loss": 1.6359, - "step": 39670 - }, - { - "epoch": 1.26976, - "grad_norm": 44.76688003540039, - "learning_rate": 8.113777777777779e-06, - "loss": 1.6088, - "step": 39680 - }, - { - "epoch": 1.27008, - "grad_norm": 43.66759490966797, - "learning_rate": 8.110222222222222e-06, - "loss": 1.6151, - "step": 39690 - }, - { - "epoch": 1.2704, - "grad_norm": 44.64482879638672, - "learning_rate": 8.106666666666666e-06, - "loss": 1.6206, - "step": 39700 - }, - { - "epoch": 1.27072, - "grad_norm": 44.56056594848633, - "learning_rate": 8.103111111111111e-06, - "loss": 1.6027, - "step": 39710 - }, - { - "epoch": 1.27104, - "grad_norm": 45.265018463134766, - "learning_rate": 8.099555555555556e-06, - "loss": 1.5946, - "step": 39720 - }, - { - "epoch": 1.27136, - "grad_norm": 45.138187408447266, - "learning_rate": 8.096e-06, - "loss": 1.6091, - "step": 39730 - }, - { - "epoch": 1.27168, - "grad_norm": 45.547271728515625, - "learning_rate": 8.092444444444445e-06, - "loss": 1.581, - "step": 39740 - }, - { - "epoch": 1.272, - "grad_norm": 43.008277893066406, - "learning_rate": 8.08888888888889e-06, - "loss": 1.6314, - "step": 39750 - }, - { - "epoch": 1.2723200000000001, - "grad_norm": 45.12823486328125, - "learning_rate": 8.085333333333334e-06, - "loss": 1.5787, - "step": 39760 - }, - { - "epoch": 1.27264, - "grad_norm": 46.13259506225586, - "learning_rate": 8.081777777777778e-06, - "loss": 1.5724, - "step": 39770 - }, - { - "epoch": 1.2729599999999999, - "grad_norm": 44.257450103759766, - "learning_rate": 8.078222222222223e-06, - "loss": 1.6312, - "step": 39780 - }, - { - "epoch": 1.27328, - "grad_norm": 44.41106414794922, - "learning_rate": 8.074666666666667e-06, - "loss": 1.6285, - "step": 39790 - }, - { - "epoch": 1.2736, - "grad_norm": 43.90785217285156, - "learning_rate": 8.071111111111112e-06, - "loss": 1.5971, - "step": 39800 - }, - { - "epoch": 1.27392, - "grad_norm": 43.35899353027344, - "learning_rate": 8.067555555555557e-06, - "loss": 1.6186, - "step": 39810 - }, - { - "epoch": 1.27424, - "grad_norm": 47.12132263183594, - "learning_rate": 8.064000000000001e-06, - "loss": 1.6181, - "step": 39820 - }, - { - "epoch": 1.2745600000000001, - "grad_norm": 43.785736083984375, - "learning_rate": 8.060444444444446e-06, - "loss": 1.5915, - "step": 39830 - }, - { - "epoch": 1.27488, - "grad_norm": 44.73807907104492, - "learning_rate": 8.05688888888889e-06, - "loss": 1.6149, - "step": 39840 - }, - { - "epoch": 1.2752, - "grad_norm": 44.06450271606445, - "learning_rate": 8.053333333333335e-06, - "loss": 1.6082, - "step": 39850 - }, - { - "epoch": 1.27552, - "grad_norm": 44.62391662597656, - "learning_rate": 8.04977777777778e-06, - "loss": 1.6064, - "step": 39860 - }, - { - "epoch": 1.27584, - "grad_norm": 48.22390365600586, - "learning_rate": 8.046222222222224e-06, - "loss": 1.6159, - "step": 39870 - }, - { - "epoch": 1.27616, - "grad_norm": 45.42574691772461, - "learning_rate": 8.042666666666667e-06, - "loss": 1.625, - "step": 39880 - }, - { - "epoch": 1.27648, - "grad_norm": 45.23716735839844, - "learning_rate": 8.039111111111111e-06, - "loss": 1.6274, - "step": 39890 - }, - { - "epoch": 1.2768, - "grad_norm": 44.92580795288086, - "learning_rate": 8.035555555555556e-06, - "loss": 1.596, - "step": 39900 - }, - { - "epoch": 1.27712, - "grad_norm": 45.10259246826172, - "learning_rate": 8.032e-06, - "loss": 1.5999, - "step": 39910 - }, - { - "epoch": 1.27744, - "grad_norm": 44.05598068237305, - "learning_rate": 
8.028444444444445e-06, - "loss": 1.616, - "step": 39920 - }, - { - "epoch": 1.27776, - "grad_norm": 43.27324676513672, - "learning_rate": 8.02488888888889e-06, - "loss": 1.613, - "step": 39930 - }, - { - "epoch": 1.27808, - "grad_norm": 45.73674392700195, - "learning_rate": 8.021333333333334e-06, - "loss": 1.612, - "step": 39940 - }, - { - "epoch": 1.2784, - "grad_norm": 44.60195541381836, - "learning_rate": 8.017777777777779e-06, - "loss": 1.6288, - "step": 39950 - }, - { - "epoch": 1.27872, - "grad_norm": 45.1451416015625, - "learning_rate": 8.014222222222223e-06, - "loss": 1.577, - "step": 39960 - }, - { - "epoch": 1.27904, - "grad_norm": 44.034568786621094, - "learning_rate": 8.010666666666668e-06, - "loss": 1.632, - "step": 39970 - }, - { - "epoch": 1.27936, - "grad_norm": 46.17295455932617, - "learning_rate": 8.007111111111112e-06, - "loss": 1.6267, - "step": 39980 - }, - { - "epoch": 1.27968, - "grad_norm": 45.9102668762207, - "learning_rate": 8.003555555555557e-06, - "loss": 1.6077, - "step": 39990 - }, - { - "epoch": 1.28, - "grad_norm": 43.993289947509766, - "learning_rate": 8.000000000000001e-06, - "loss": 1.5885, - "step": 40000 - }, - { - "epoch": 1.2803200000000001, - "grad_norm": 45.628448486328125, - "learning_rate": 7.996444444444446e-06, - "loss": 1.6315, - "step": 40010 - }, - { - "epoch": 1.28064, - "grad_norm": 44.6242561340332, - "learning_rate": 7.99288888888889e-06, - "loss": 1.6086, - "step": 40020 - }, - { - "epoch": 1.2809599999999999, - "grad_norm": 47.21349334716797, - "learning_rate": 7.989333333333335e-06, - "loss": 1.6256, - "step": 40030 - }, - { - "epoch": 1.28128, - "grad_norm": 46.001495361328125, - "learning_rate": 7.98577777777778e-06, - "loss": 1.6245, - "step": 40040 - }, - { - "epoch": 1.2816, - "grad_norm": 43.2928466796875, - "learning_rate": 7.982222222222224e-06, - "loss": 1.6257, - "step": 40050 - }, - { - "epoch": 1.28192, - "grad_norm": 44.0401496887207, - "learning_rate": 7.978666666666667e-06, - "loss": 1.6124, - "step": 40060 - }, - { - "epoch": 1.28224, - "grad_norm": 45.14518737792969, - "learning_rate": 7.975111111111112e-06, - "loss": 1.6006, - "step": 40070 - }, - { - "epoch": 1.28256, - "grad_norm": 43.09754180908203, - "learning_rate": 7.971555555555556e-06, - "loss": 1.5802, - "step": 40080 - }, - { - "epoch": 1.28288, - "grad_norm": 45.571258544921875, - "learning_rate": 7.968e-06, - "loss": 1.6197, - "step": 40090 - }, - { - "epoch": 1.2832, - "grad_norm": 45.45378112792969, - "learning_rate": 7.964444444444445e-06, - "loss": 1.623, - "step": 40100 - }, - { - "epoch": 1.28352, - "grad_norm": 44.5523796081543, - "learning_rate": 7.96088888888889e-06, - "loss": 1.6224, - "step": 40110 - }, - { - "epoch": 1.28384, - "grad_norm": 44.05735397338867, - "learning_rate": 7.957333333333334e-06, - "loss": 1.6143, - "step": 40120 - }, - { - "epoch": 1.28416, - "grad_norm": 44.72103500366211, - "learning_rate": 7.953777777777779e-06, - "loss": 1.6412, - "step": 40130 - }, - { - "epoch": 1.28448, - "grad_norm": 47.94450378417969, - "learning_rate": 7.950222222222222e-06, - "loss": 1.6276, - "step": 40140 - }, - { - "epoch": 1.2848, - "grad_norm": 46.01536560058594, - "learning_rate": 7.946666666666666e-06, - "loss": 1.6162, - "step": 40150 - }, - { - "epoch": 1.28512, - "grad_norm": 44.166263580322266, - "learning_rate": 7.94311111111111e-06, - "loss": 1.5908, - "step": 40160 - }, - { - "epoch": 1.28544, - "grad_norm": 45.8519287109375, - "learning_rate": 7.939555555555555e-06, - "loss": 1.6292, - "step": 40170 - }, - { - "epoch": 1.28576, - 
"grad_norm": 45.5851936340332, - "learning_rate": 7.936e-06, - "loss": 1.6074, - "step": 40180 - }, - { - "epoch": 1.2860800000000001, - "grad_norm": 45.92979431152344, - "learning_rate": 7.932444444444444e-06, - "loss": 1.5674, - "step": 40190 - }, - { - "epoch": 1.2864, - "grad_norm": 43.50894546508789, - "learning_rate": 7.928888888888889e-06, - "loss": 1.5989, - "step": 40200 - }, - { - "epoch": 1.2867199999999999, - "grad_norm": 46.01730728149414, - "learning_rate": 7.925333333333334e-06, - "loss": 1.6008, - "step": 40210 - }, - { - "epoch": 1.28704, - "grad_norm": 45.77237319946289, - "learning_rate": 7.921777777777778e-06, - "loss": 1.6168, - "step": 40220 - }, - { - "epoch": 1.28736, - "grad_norm": 44.72514343261719, - "learning_rate": 7.918222222222223e-06, - "loss": 1.6021, - "step": 40230 - }, - { - "epoch": 1.28768, - "grad_norm": 44.8151969909668, - "learning_rate": 7.914666666666667e-06, - "loss": 1.5952, - "step": 40240 - }, - { - "epoch": 1.288, - "grad_norm": 44.08286666870117, - "learning_rate": 7.911111111111112e-06, - "loss": 1.6163, - "step": 40250 - }, - { - "epoch": 1.2883200000000001, - "grad_norm": 44.34801483154297, - "learning_rate": 7.907555555555556e-06, - "loss": 1.6181, - "step": 40260 - }, - { - "epoch": 1.28864, - "grad_norm": 43.255027770996094, - "learning_rate": 7.904000000000001e-06, - "loss": 1.6056, - "step": 40270 - }, - { - "epoch": 1.2889599999999999, - "grad_norm": 45.57148361206055, - "learning_rate": 7.900444444444445e-06, - "loss": 1.6077, - "step": 40280 - }, - { - "epoch": 1.28928, - "grad_norm": 46.56278991699219, - "learning_rate": 7.89688888888889e-06, - "loss": 1.6375, - "step": 40290 - }, - { - "epoch": 1.2896, - "grad_norm": 46.41355514526367, - "learning_rate": 7.893333333333335e-06, - "loss": 1.6111, - "step": 40300 - }, - { - "epoch": 1.28992, - "grad_norm": 43.413761138916016, - "learning_rate": 7.889777777777779e-06, - "loss": 1.6313, - "step": 40310 - }, - { - "epoch": 1.29024, - "grad_norm": 43.898345947265625, - "learning_rate": 7.886222222222224e-06, - "loss": 1.6062, - "step": 40320 - }, - { - "epoch": 1.29056, - "grad_norm": 43.248104095458984, - "learning_rate": 7.882666666666667e-06, - "loss": 1.61, - "step": 40330 - }, - { - "epoch": 1.29088, - "grad_norm": 42.96113967895508, - "learning_rate": 7.879111111111111e-06, - "loss": 1.5724, - "step": 40340 - }, - { - "epoch": 1.2912, - "grad_norm": 43.738590240478516, - "learning_rate": 7.875555555555556e-06, - "loss": 1.6262, - "step": 40350 - }, - { - "epoch": 1.29152, - "grad_norm": 45.08527755737305, - "learning_rate": 7.872e-06, - "loss": 1.6212, - "step": 40360 - }, - { - "epoch": 1.29184, - "grad_norm": 44.45360565185547, - "learning_rate": 7.868444444444445e-06, - "loss": 1.625, - "step": 40370 - }, - { - "epoch": 1.29216, - "grad_norm": 47.08750534057617, - "learning_rate": 7.86488888888889e-06, - "loss": 1.6254, - "step": 40380 - }, - { - "epoch": 1.29248, - "grad_norm": 42.82807540893555, - "learning_rate": 7.861333333333334e-06, - "loss": 1.6114, - "step": 40390 - }, - { - "epoch": 1.2928, - "grad_norm": 43.76680374145508, - "learning_rate": 7.857777777777778e-06, - "loss": 1.6219, - "step": 40400 - }, - { - "epoch": 1.29312, - "grad_norm": 43.96428680419922, - "learning_rate": 7.854222222222223e-06, - "loss": 1.5789, - "step": 40410 - }, - { - "epoch": 1.29344, - "grad_norm": 46.33516311645508, - "learning_rate": 7.850666666666668e-06, - "loss": 1.6083, - "step": 40420 - }, - { - "epoch": 1.29376, - "grad_norm": 44.434627532958984, - "learning_rate": 
7.847111111111112e-06, - "loss": 1.6226, - "step": 40430 - }, - { - "epoch": 1.2940800000000001, - "grad_norm": 43.88167953491211, - "learning_rate": 7.843555555555557e-06, - "loss": 1.5743, - "step": 40440 - }, - { - "epoch": 1.2944, - "grad_norm": 46.007728576660156, - "learning_rate": 7.840000000000001e-06, - "loss": 1.6166, - "step": 40450 - }, - { - "epoch": 1.2947199999999999, - "grad_norm": 44.59865951538086, - "learning_rate": 7.836444444444446e-06, - "loss": 1.6035, - "step": 40460 - }, - { - "epoch": 1.29504, - "grad_norm": 44.90102005004883, - "learning_rate": 7.83288888888889e-06, - "loss": 1.6403, - "step": 40470 - }, - { - "epoch": 1.29536, - "grad_norm": 46.19745635986328, - "learning_rate": 7.829333333333335e-06, - "loss": 1.6255, - "step": 40480 - }, - { - "epoch": 1.29568, - "grad_norm": 43.79157638549805, - "learning_rate": 7.82577777777778e-06, - "loss": 1.6345, - "step": 40490 - }, - { - "epoch": 1.296, - "grad_norm": 44.32958221435547, - "learning_rate": 7.822222222222224e-06, - "loss": 1.5844, - "step": 40500 - }, - { - "epoch": 1.29632, - "grad_norm": 46.034942626953125, - "learning_rate": 7.818666666666668e-06, - "loss": 1.6114, - "step": 40510 - }, - { - "epoch": 1.29664, - "grad_norm": 46.09269714355469, - "learning_rate": 7.815111111111113e-06, - "loss": 1.6044, - "step": 40520 - }, - { - "epoch": 1.29696, - "grad_norm": 45.11857986450195, - "learning_rate": 7.811555555555556e-06, - "loss": 1.6074, - "step": 40530 - }, - { - "epoch": 1.29728, - "grad_norm": 42.98341369628906, - "learning_rate": 7.808e-06, - "loss": 1.597, - "step": 40540 - }, - { - "epoch": 1.2976, - "grad_norm": 45.964637756347656, - "learning_rate": 7.804444444444445e-06, - "loss": 1.6156, - "step": 40550 - }, - { - "epoch": 1.29792, - "grad_norm": 43.90168762207031, - "learning_rate": 7.80088888888889e-06, - "loss": 1.5891, - "step": 40560 - }, - { - "epoch": 1.29824, - "grad_norm": 45.83740997314453, - "learning_rate": 7.797333333333334e-06, - "loss": 1.592, - "step": 40570 - }, - { - "epoch": 1.29856, - "grad_norm": 44.05854797363281, - "learning_rate": 7.793777777777779e-06, - "loss": 1.5761, - "step": 40580 - }, - { - "epoch": 1.29888, - "grad_norm": 47.54555130004883, - "learning_rate": 7.790222222222222e-06, - "loss": 1.6345, - "step": 40590 - }, - { - "epoch": 1.2992, - "grad_norm": 44.667266845703125, - "learning_rate": 7.786666666666666e-06, - "loss": 1.5964, - "step": 40600 - }, - { - "epoch": 1.29952, - "grad_norm": 46.65251922607422, - "learning_rate": 7.78311111111111e-06, - "loss": 1.6193, - "step": 40610 - }, - { - "epoch": 1.29984, - "grad_norm": 43.384761810302734, - "learning_rate": 7.779555555555555e-06, - "loss": 1.6043, - "step": 40620 - }, - { - "epoch": 1.30016, - "grad_norm": 44.91738510131836, - "learning_rate": 7.776e-06, - "loss": 1.5873, - "step": 40630 - }, - { - "epoch": 1.30048, - "grad_norm": 45.923622131347656, - "learning_rate": 7.772444444444444e-06, - "loss": 1.6069, - "step": 40640 - }, - { - "epoch": 1.3008, - "grad_norm": 45.634765625, - "learning_rate": 7.768888888888889e-06, - "loss": 1.6204, - "step": 40650 - }, - { - "epoch": 1.30112, - "grad_norm": 46.069881439208984, - "learning_rate": 7.765333333333333e-06, - "loss": 1.6164, - "step": 40660 - }, - { - "epoch": 1.30144, - "grad_norm": 43.63170623779297, - "learning_rate": 7.761777777777778e-06, - "loss": 1.6255, - "step": 40670 - }, - { - "epoch": 1.30176, - "grad_norm": 43.349246978759766, - "learning_rate": 7.758222222222223e-06, - "loss": 1.6029, - "step": 40680 - }, - { - "epoch": 
1.3020800000000001, - "grad_norm": 45.24386215209961, - "learning_rate": 7.754666666666667e-06, - "loss": 1.6056, - "step": 40690 - }, - { - "epoch": 1.3024, - "grad_norm": 43.81452560424805, - "learning_rate": 7.751111111111112e-06, - "loss": 1.6055, - "step": 40700 - }, - { - "epoch": 1.3027199999999999, - "grad_norm": 46.62950134277344, - "learning_rate": 7.747555555555556e-06, - "loss": 1.624, - "step": 40710 - }, - { - "epoch": 1.30304, - "grad_norm": 46.991554260253906, - "learning_rate": 7.744e-06, - "loss": 1.642, - "step": 40720 - }, - { - "epoch": 1.30336, - "grad_norm": 46.24224853515625, - "learning_rate": 7.740444444444445e-06, - "loss": 1.6157, - "step": 40730 - }, - { - "epoch": 1.30368, - "grad_norm": 44.421443939208984, - "learning_rate": 7.73688888888889e-06, - "loss": 1.603, - "step": 40740 - }, - { - "epoch": 1.304, - "grad_norm": 45.14869689941406, - "learning_rate": 7.733333333333334e-06, - "loss": 1.6015, - "step": 40750 - }, - { - "epoch": 1.30432, - "grad_norm": 43.42634582519531, - "learning_rate": 7.729777777777779e-06, - "loss": 1.6017, - "step": 40760 - }, - { - "epoch": 1.30464, - "grad_norm": 45.673404693603516, - "learning_rate": 7.726222222222224e-06, - "loss": 1.6036, - "step": 40770 - }, - { - "epoch": 1.30496, - "grad_norm": 45.315757751464844, - "learning_rate": 7.722666666666666e-06, - "loss": 1.6234, - "step": 40780 - }, - { - "epoch": 1.30528, - "grad_norm": 45.67897415161133, - "learning_rate": 7.719111111111111e-06, - "loss": 1.5891, - "step": 40790 - }, - { - "epoch": 1.3056, - "grad_norm": 45.099693298339844, - "learning_rate": 7.715555555555555e-06, - "loss": 1.6294, - "step": 40800 - }, - { - "epoch": 1.30592, - "grad_norm": 44.96028518676758, - "learning_rate": 7.712e-06, - "loss": 1.626, - "step": 40810 - }, - { - "epoch": 1.30624, - "grad_norm": 45.61831283569336, - "learning_rate": 7.708444444444445e-06, - "loss": 1.6303, - "step": 40820 - }, - { - "epoch": 1.30656, - "grad_norm": 44.800445556640625, - "learning_rate": 7.704888888888889e-06, - "loss": 1.5906, - "step": 40830 - }, - { - "epoch": 1.30688, - "grad_norm": 45.365909576416016, - "learning_rate": 7.701333333333334e-06, - "loss": 1.6078, - "step": 40840 - }, - { - "epoch": 1.3072, - "grad_norm": 47.193538665771484, - "learning_rate": 7.697777777777778e-06, - "loss": 1.6195, - "step": 40850 - }, - { - "epoch": 1.30752, - "grad_norm": 44.04536437988281, - "learning_rate": 7.694222222222223e-06, - "loss": 1.6339, - "step": 40860 - }, - { - "epoch": 1.3078400000000001, - "grad_norm": 44.930973052978516, - "learning_rate": 7.690666666666667e-06, - "loss": 1.6223, - "step": 40870 - }, - { - "epoch": 1.30816, - "grad_norm": 46.143306732177734, - "learning_rate": 7.687111111111112e-06, - "loss": 1.5855, - "step": 40880 - }, - { - "epoch": 1.3084799999999999, - "grad_norm": 44.251708984375, - "learning_rate": 7.683555555555556e-06, - "loss": 1.5935, - "step": 40890 - }, - { - "epoch": 1.3088, - "grad_norm": 44.97640609741211, - "learning_rate": 7.680000000000001e-06, - "loss": 1.6104, - "step": 40900 - }, - { - "epoch": 1.30912, - "grad_norm": 46.09316635131836, - "learning_rate": 7.676444444444446e-06, - "loss": 1.6225, - "step": 40910 - }, - { - "epoch": 1.30944, - "grad_norm": 44.81732940673828, - "learning_rate": 7.67288888888889e-06, - "loss": 1.6374, - "step": 40920 - }, - { - "epoch": 1.30976, - "grad_norm": 43.48009490966797, - "learning_rate": 7.669333333333335e-06, - "loss": 1.5855, - "step": 40930 - }, - { - "epoch": 1.3100800000000001, - "grad_norm": 42.132633209228516, - 
"learning_rate": 7.66577777777778e-06, - "loss": 1.6129, - "step": 40940 - }, - { - "epoch": 1.3104, - "grad_norm": 43.00667190551758, - "learning_rate": 7.662222222222224e-06, - "loss": 1.5724, - "step": 40950 - }, - { - "epoch": 1.3107199999999999, - "grad_norm": 47.47547149658203, - "learning_rate": 7.658666666666668e-06, - "loss": 1.6237, - "step": 40960 - }, - { - "epoch": 1.31104, - "grad_norm": 44.40707778930664, - "learning_rate": 7.655111111111113e-06, - "loss": 1.6235, - "step": 40970 - }, - { - "epoch": 1.31136, - "grad_norm": 45.595489501953125, - "learning_rate": 7.651555555555556e-06, - "loss": 1.5843, - "step": 40980 - }, - { - "epoch": 1.31168, - "grad_norm": 45.07762908935547, - "learning_rate": 7.648e-06, - "loss": 1.6275, - "step": 40990 - }, - { - "epoch": 1.312, - "grad_norm": 44.50121307373047, - "learning_rate": 7.644444444444445e-06, - "loss": 1.6012, - "step": 41000 - }, - { - "epoch": 1.31232, - "grad_norm": 44.96818923950195, - "learning_rate": 7.64088888888889e-06, - "loss": 1.5821, - "step": 41010 - }, - { - "epoch": 1.31264, - "grad_norm": 44.83430862426758, - "learning_rate": 7.637333333333334e-06, - "loss": 1.6233, - "step": 41020 - }, - { - "epoch": 1.31296, - "grad_norm": 45.33064270019531, - "learning_rate": 7.633777777777779e-06, - "loss": 1.582, - "step": 41030 - }, - { - "epoch": 1.31328, - "grad_norm": 44.263946533203125, - "learning_rate": 7.630222222222223e-06, - "loss": 1.5741, - "step": 41040 - }, - { - "epoch": 1.3136, - "grad_norm": 44.7042350769043, - "learning_rate": 7.626666666666668e-06, - "loss": 1.6065, - "step": 41050 - }, - { - "epoch": 1.31392, - "grad_norm": 45.056270599365234, - "learning_rate": 7.623111111111112e-06, - "loss": 1.6121, - "step": 41060 - }, - { - "epoch": 1.31424, - "grad_norm": 46.58217239379883, - "learning_rate": 7.619555555555557e-06, - "loss": 1.6193, - "step": 41070 - }, - { - "epoch": 1.31456, - "grad_norm": 44.109134674072266, - "learning_rate": 7.616000000000001e-06, - "loss": 1.5887, - "step": 41080 - }, - { - "epoch": 1.31488, - "grad_norm": 45.42112350463867, - "learning_rate": 7.612444444444444e-06, - "loss": 1.616, - "step": 41090 - }, - { - "epoch": 1.3152, - "grad_norm": 45.975318908691406, - "learning_rate": 7.608888888888889e-06, - "loss": 1.5935, - "step": 41100 - }, - { - "epoch": 1.31552, - "grad_norm": 45.71128845214844, - "learning_rate": 7.605333333333333e-06, - "loss": 1.6239, - "step": 41110 - }, - { - "epoch": 1.3158400000000001, - "grad_norm": 45.99323272705078, - "learning_rate": 7.601777777777778e-06, - "loss": 1.6085, - "step": 41120 - }, - { - "epoch": 1.31616, - "grad_norm": 44.56714630126953, - "learning_rate": 7.598222222222222e-06, - "loss": 1.5948, - "step": 41130 - }, - { - "epoch": 1.3164799999999999, - "grad_norm": 44.28768539428711, - "learning_rate": 7.594666666666667e-06, - "loss": 1.6283, - "step": 41140 - }, - { - "epoch": 1.3168, - "grad_norm": 46.32910919189453, - "learning_rate": 7.5911111111111115e-06, - "loss": 1.6269, - "step": 41150 - }, - { - "epoch": 1.31712, - "grad_norm": 44.03658676147461, - "learning_rate": 7.587555555555556e-06, - "loss": 1.5864, - "step": 41160 - }, - { - "epoch": 1.31744, - "grad_norm": 46.9725227355957, - "learning_rate": 7.5840000000000006e-06, - "loss": 1.5915, - "step": 41170 - }, - { - "epoch": 1.31776, - "grad_norm": 44.8245849609375, - "learning_rate": 7.580444444444445e-06, - "loss": 1.6049, - "step": 41180 - }, - { - "epoch": 1.31808, - "grad_norm": 44.639652252197266, - "learning_rate": 7.576888888888889e-06, - "loss": 1.6207, - 
"step": 41190 - }, - { - "epoch": 1.3184, - "grad_norm": 44.07933044433594, - "learning_rate": 7.573333333333333e-06, - "loss": 1.6292, - "step": 41200 - }, - { - "epoch": 1.31872, - "grad_norm": 46.18787384033203, - "learning_rate": 7.569777777777778e-06, - "loss": 1.6415, - "step": 41210 - }, - { - "epoch": 1.31904, - "grad_norm": 47.621158599853516, - "learning_rate": 7.5662222222222225e-06, - "loss": 1.616, - "step": 41220 - }, - { - "epoch": 1.31936, - "grad_norm": 46.72417449951172, - "learning_rate": 7.562666666666667e-06, - "loss": 1.6256, - "step": 41230 - }, - { - "epoch": 1.31968, - "grad_norm": 45.38915252685547, - "learning_rate": 7.559111111111112e-06, - "loss": 1.6002, - "step": 41240 - }, - { - "epoch": 1.32, - "grad_norm": 45.14508819580078, - "learning_rate": 7.555555555555556e-06, - "loss": 1.6152, - "step": 41250 - }, - { - "epoch": 1.32032, - "grad_norm": 44.36618423461914, - "learning_rate": 7.552000000000001e-06, - "loss": 1.6239, - "step": 41260 - }, - { - "epoch": 1.32064, - "grad_norm": 46.560482025146484, - "learning_rate": 7.548444444444445e-06, - "loss": 1.6267, - "step": 41270 - }, - { - "epoch": 1.32096, - "grad_norm": 44.61228561401367, - "learning_rate": 7.544888888888889e-06, - "loss": 1.6021, - "step": 41280 - }, - { - "epoch": 1.32128, - "grad_norm": 45.6788444519043, - "learning_rate": 7.5413333333333335e-06, - "loss": 1.5753, - "step": 41290 - }, - { - "epoch": 1.3216, - "grad_norm": 44.486167907714844, - "learning_rate": 7.537777777777778e-06, - "loss": 1.5784, - "step": 41300 - }, - { - "epoch": 1.32192, - "grad_norm": 46.06887435913086, - "learning_rate": 7.534222222222223e-06, - "loss": 1.6227, - "step": 41310 - }, - { - "epoch": 1.32224, - "grad_norm": 42.2825813293457, - "learning_rate": 7.530666666666667e-06, - "loss": 1.6255, - "step": 41320 - }, - { - "epoch": 1.32256, - "grad_norm": 44.45964813232422, - "learning_rate": 7.527111111111112e-06, - "loss": 1.5935, - "step": 41330 - }, - { - "epoch": 1.32288, - "grad_norm": 43.408695220947266, - "learning_rate": 7.523555555555556e-06, - "loss": 1.6149, - "step": 41340 - }, - { - "epoch": 1.3232, - "grad_norm": 44.885047912597656, - "learning_rate": 7.520000000000001e-06, - "loss": 1.6165, - "step": 41350 - }, - { - "epoch": 1.32352, - "grad_norm": 45.36357498168945, - "learning_rate": 7.516444444444445e-06, - "loss": 1.5888, - "step": 41360 - }, - { - "epoch": 1.3238400000000001, - "grad_norm": 43.71674346923828, - "learning_rate": 7.51288888888889e-06, - "loss": 1.6012, - "step": 41370 - }, - { - "epoch": 1.32416, - "grad_norm": 44.332122802734375, - "learning_rate": 7.509333333333334e-06, - "loss": 1.614, - "step": 41380 - }, - { - "epoch": 1.3244799999999999, - "grad_norm": 44.52497100830078, - "learning_rate": 7.505777777777778e-06, - "loss": 1.6144, - "step": 41390 - }, - { - "epoch": 1.3248, - "grad_norm": 45.074867248535156, - "learning_rate": 7.502222222222223e-06, - "loss": 1.6412, - "step": 41400 - }, - { - "epoch": 1.32512, - "grad_norm": 44.016639709472656, - "learning_rate": 7.498666666666667e-06, - "loss": 1.6299, - "step": 41410 - }, - { - "epoch": 1.32544, - "grad_norm": 44.419776916503906, - "learning_rate": 7.495111111111112e-06, - "loss": 1.6163, - "step": 41420 - }, - { - "epoch": 1.32576, - "grad_norm": 46.50283432006836, - "learning_rate": 7.4915555555555564e-06, - "loss": 1.573, - "step": 41430 - }, - { - "epoch": 1.32608, - "grad_norm": 46.12255859375, - "learning_rate": 7.488000000000001e-06, - "loss": 1.6028, - "step": 41440 - }, - { - "epoch": 1.3264, - "grad_norm": 
45.1977653503418, - "learning_rate": 7.4844444444444455e-06, - "loss": 1.6403, - "step": 41450 - }, - { - "epoch": 1.32672, - "grad_norm": 46.167457580566406, - "learning_rate": 7.48088888888889e-06, - "loss": 1.5972, - "step": 41460 - }, - { - "epoch": 1.32704, - "grad_norm": 46.73789978027344, - "learning_rate": 7.477333333333335e-06, - "loss": 1.6071, - "step": 41470 - }, - { - "epoch": 1.32736, - "grad_norm": 45.93767547607422, - "learning_rate": 7.473777777777778e-06, - "loss": 1.6254, - "step": 41480 - }, - { - "epoch": 1.32768, - "grad_norm": 44.71085739135742, - "learning_rate": 7.470222222222223e-06, - "loss": 1.5951, - "step": 41490 - }, - { - "epoch": 1.328, - "grad_norm": 45.44325256347656, - "learning_rate": 7.4666666666666675e-06, - "loss": 1.6183, - "step": 41500 - }, - { - "epoch": 1.32832, - "grad_norm": 45.813087463378906, - "learning_rate": 7.463111111111112e-06, - "loss": 1.6019, - "step": 41510 - }, - { - "epoch": 1.32864, - "grad_norm": 43.0788459777832, - "learning_rate": 7.4595555555555566e-06, - "loss": 1.6332, - "step": 41520 - }, - { - "epoch": 1.32896, - "grad_norm": 50.90683364868164, - "learning_rate": 7.456000000000001e-06, - "loss": 1.6036, - "step": 41530 - }, - { - "epoch": 1.32928, - "grad_norm": 44.96916198730469, - "learning_rate": 7.452444444444446e-06, - "loss": 1.6276, - "step": 41540 - }, - { - "epoch": 1.3296000000000001, - "grad_norm": 45.77922439575195, - "learning_rate": 7.44888888888889e-06, - "loss": 1.6187, - "step": 41550 - }, - { - "epoch": 1.32992, - "grad_norm": 46.79462432861328, - "learning_rate": 7.445333333333335e-06, - "loss": 1.6277, - "step": 41560 - }, - { - "epoch": 1.3302399999999999, - "grad_norm": 45.878135681152344, - "learning_rate": 7.4417777777777785e-06, - "loss": 1.605, - "step": 41570 - }, - { - "epoch": 1.33056, - "grad_norm": 45.22720718383789, - "learning_rate": 7.438222222222223e-06, - "loss": 1.601, - "step": 41580 - }, - { - "epoch": 1.33088, - "grad_norm": 43.85465621948242, - "learning_rate": 7.434666666666668e-06, - "loss": 1.6047, - "step": 41590 - }, - { - "epoch": 1.3312, - "grad_norm": 43.823081970214844, - "learning_rate": 7.431111111111111e-06, - "loss": 1.6103, - "step": 41600 - }, - { - "epoch": 1.33152, - "grad_norm": 43.73310089111328, - "learning_rate": 7.427555555555556e-06, - "loss": 1.6175, - "step": 41610 - }, - { - "epoch": 1.3318400000000001, - "grad_norm": 45.74242401123047, - "learning_rate": 7.424e-06, - "loss": 1.6308, - "step": 41620 - }, - { - "epoch": 1.33216, - "grad_norm": 49.32040023803711, - "learning_rate": 7.420444444444445e-06, - "loss": 1.6227, - "step": 41630 - }, - { - "epoch": 1.3324799999999999, - "grad_norm": 44.08414840698242, - "learning_rate": 7.416888888888889e-06, - "loss": 1.618, - "step": 41640 - }, - { - "epoch": 1.3328, - "grad_norm": 42.85681915283203, - "learning_rate": 7.413333333333333e-06, - "loss": 1.6222, - "step": 41650 - }, - { - "epoch": 1.33312, - "grad_norm": 44.588043212890625, - "learning_rate": 7.409777777777778e-06, - "loss": 1.6339, - "step": 41660 - }, - { - "epoch": 1.33344, - "grad_norm": 46.16299057006836, - "learning_rate": 7.406222222222222e-06, - "loss": 1.6058, - "step": 41670 - }, - { - "epoch": 1.33376, - "grad_norm": 42.70748519897461, - "learning_rate": 7.402666666666667e-06, - "loss": 1.6168, - "step": 41680 - }, - { - "epoch": 1.33408, - "grad_norm": 45.55185317993164, - "learning_rate": 7.3991111111111114e-06, - "loss": 1.6146, - "step": 41690 - }, - { - "epoch": 1.3344, - "grad_norm": 44.13607406616211, - "learning_rate": 
7.395555555555556e-06, - "loss": 1.5932, - "step": 41700 - }, - { - "epoch": 1.33472, - "grad_norm": 44.90840530395508, - "learning_rate": 7.3920000000000005e-06, - "loss": 1.6389, - "step": 41710 - }, - { - "epoch": 1.33504, - "grad_norm": 45.11433792114258, - "learning_rate": 7.388444444444445e-06, - "loss": 1.6186, - "step": 41720 - }, - { - "epoch": 1.33536, - "grad_norm": 44.75900650024414, - "learning_rate": 7.384888888888889e-06, - "loss": 1.617, - "step": 41730 - }, - { - "epoch": 1.33568, - "grad_norm": 45.705841064453125, - "learning_rate": 7.381333333333333e-06, - "loss": 1.6117, - "step": 41740 - }, - { - "epoch": 1.336, - "grad_norm": 46.18943405151367, - "learning_rate": 7.377777777777778e-06, - "loss": 1.6025, - "step": 41750 - }, - { - "epoch": 1.33632, - "grad_norm": 46.8182258605957, - "learning_rate": 7.3742222222222225e-06, - "loss": 1.5902, - "step": 41760 - }, - { - "epoch": 1.33664, - "grad_norm": 46.080604553222656, - "learning_rate": 7.370666666666667e-06, - "loss": 1.6143, - "step": 41770 - }, - { - "epoch": 1.33696, - "grad_norm": 45.11826705932617, - "learning_rate": 7.3671111111111116e-06, - "loss": 1.5883, - "step": 41780 - }, - { - "epoch": 1.33728, - "grad_norm": 44.33161926269531, - "learning_rate": 7.363555555555556e-06, - "loss": 1.6075, - "step": 41790 - }, - { - "epoch": 1.3376000000000001, - "grad_norm": 45.10640335083008, - "learning_rate": 7.360000000000001e-06, - "loss": 1.623, - "step": 41800 - }, - { - "epoch": 1.33792, - "grad_norm": 43.860267639160156, - "learning_rate": 7.356444444444445e-06, - "loss": 1.5983, - "step": 41810 - }, - { - "epoch": 1.3382399999999999, - "grad_norm": 45.181495666503906, - "learning_rate": 7.35288888888889e-06, - "loss": 1.6088, - "step": 41820 - }, - { - "epoch": 1.33856, - "grad_norm": 46.86771774291992, - "learning_rate": 7.3493333333333335e-06, - "loss": 1.5976, - "step": 41830 - }, - { - "epoch": 1.33888, - "grad_norm": 43.91908264160156, - "learning_rate": 7.345777777777778e-06, - "loss": 1.6186, - "step": 41840 - }, - { - "epoch": 1.3392, - "grad_norm": 45.94272994995117, - "learning_rate": 7.342222222222223e-06, - "loss": 1.6071, - "step": 41850 - }, - { - "epoch": 1.33952, - "grad_norm": 44.59323501586914, - "learning_rate": 7.338666666666667e-06, - "loss": 1.6292, - "step": 41860 - }, - { - "epoch": 1.33984, - "grad_norm": 46.60374450683594, - "learning_rate": 7.335111111111112e-06, - "loss": 1.6075, - "step": 41870 - }, - { - "epoch": 1.34016, - "grad_norm": 44.81357192993164, - "learning_rate": 7.331555555555556e-06, - "loss": 1.5965, - "step": 41880 - }, - { - "epoch": 1.34048, - "grad_norm": 44.506500244140625, - "learning_rate": 7.328000000000001e-06, - "loss": 1.589, - "step": 41890 - }, - { - "epoch": 1.3408, - "grad_norm": 45.52299880981445, - "learning_rate": 7.324444444444445e-06, - "loss": 1.622, - "step": 41900 - }, - { - "epoch": 1.34112, - "grad_norm": 45.362396240234375, - "learning_rate": 7.32088888888889e-06, - "loss": 1.5961, - "step": 41910 - }, - { - "epoch": 1.34144, - "grad_norm": 48.476627349853516, - "learning_rate": 7.3173333333333345e-06, - "loss": 1.6273, - "step": 41920 - }, - { - "epoch": 1.34176, - "grad_norm": 46.45351791381836, - "learning_rate": 7.313777777777778e-06, - "loss": 1.6076, - "step": 41930 - }, - { - "epoch": 1.34208, - "grad_norm": 44.8245849609375, - "learning_rate": 7.310222222222223e-06, - "loss": 1.6257, - "step": 41940 - }, - { - "epoch": 1.3424, - "grad_norm": 46.39732360839844, - "learning_rate": 7.306666666666667e-06, - "loss": 1.617, - "step": 41950 - 
}, - { - "epoch": 1.34272, - "grad_norm": 44.4480094909668, - "learning_rate": 7.303111111111112e-06, - "loss": 1.6128, - "step": 41960 - }, - { - "epoch": 1.34304, - "grad_norm": 46.01774597167969, - "learning_rate": 7.299555555555556e-06, - "loss": 1.6377, - "step": 41970 - }, - { - "epoch": 1.34336, - "grad_norm": 47.32066345214844, - "learning_rate": 7.296000000000001e-06, - "loss": 1.5866, - "step": 41980 - }, - { - "epoch": 1.34368, - "grad_norm": 44.58960723876953, - "learning_rate": 7.2924444444444455e-06, - "loss": 1.6074, - "step": 41990 - }, - { - "epoch": 1.3439999999999999, - "grad_norm": 44.3505973815918, - "learning_rate": 7.28888888888889e-06, - "loss": 1.6045, - "step": 42000 - }, - { - "epoch": 1.34432, - "grad_norm": 44.56719207763672, - "learning_rate": 7.285333333333335e-06, - "loss": 1.6252, - "step": 42010 - }, - { - "epoch": 1.34464, - "grad_norm": 45.77865219116211, - "learning_rate": 7.281777777777778e-06, - "loss": 1.6433, - "step": 42020 - }, - { - "epoch": 1.34496, - "grad_norm": 46.384979248046875, - "learning_rate": 7.278222222222223e-06, - "loss": 1.6384, - "step": 42030 - }, - { - "epoch": 1.34528, - "grad_norm": 45.07937240600586, - "learning_rate": 7.2746666666666674e-06, - "loss": 1.5896, - "step": 42040 - }, - { - "epoch": 1.3456000000000001, - "grad_norm": 43.41228485107422, - "learning_rate": 7.271111111111112e-06, - "loss": 1.5997, - "step": 42050 - }, - { - "epoch": 1.34592, - "grad_norm": 45.3345947265625, - "learning_rate": 7.2675555555555565e-06, - "loss": 1.5932, - "step": 42060 - }, - { - "epoch": 1.3462399999999999, - "grad_norm": 52.81439208984375, - "learning_rate": 7.264000000000001e-06, - "loss": 1.6182, - "step": 42070 - }, - { - "epoch": 1.34656, - "grad_norm": 77.6665267944336, - "learning_rate": 7.260444444444446e-06, - "loss": 1.5891, - "step": 42080 - }, - { - "epoch": 1.34688, - "grad_norm": 43.742164611816406, - "learning_rate": 7.25688888888889e-06, - "loss": 1.5789, - "step": 42090 - }, - { - "epoch": 1.3472, - "grad_norm": 46.076324462890625, - "learning_rate": 7.253333333333335e-06, - "loss": 1.6129, - "step": 42100 - }, - { - "epoch": 1.34752, - "grad_norm": 45.37517166137695, - "learning_rate": 7.249777777777779e-06, - "loss": 1.5823, - "step": 42110 - }, - { - "epoch": 1.34784, - "grad_norm": 45.582679748535156, - "learning_rate": 7.246222222222222e-06, - "loss": 1.6309, - "step": 42120 - }, - { - "epoch": 1.34816, - "grad_norm": 45.533573150634766, - "learning_rate": 7.242666666666667e-06, - "loss": 1.6029, - "step": 42130 - }, - { - "epoch": 1.34848, - "grad_norm": 46.39393615722656, - "learning_rate": 7.239111111111111e-06, - "loss": 1.5989, - "step": 42140 - }, - { - "epoch": 1.3488, - "grad_norm": 41.84919738769531, - "learning_rate": 7.235555555555556e-06, - "loss": 1.6176, - "step": 42150 - }, - { - "epoch": 1.34912, - "grad_norm": 45.00162887573242, - "learning_rate": 7.232e-06, - "loss": 1.6299, - "step": 42160 - }, - { - "epoch": 1.34944, - "grad_norm": 47.52516174316406, - "learning_rate": 7.228444444444445e-06, - "loss": 1.5941, - "step": 42170 - }, - { - "epoch": 1.34976, - "grad_norm": 43.75261306762695, - "learning_rate": 7.224888888888889e-06, - "loss": 1.6165, - "step": 42180 - }, - { - "epoch": 1.35008, - "grad_norm": 45.649635314941406, - "learning_rate": 7.221333333333333e-06, - "loss": 1.606, - "step": 42190 - }, - { - "epoch": 1.3504, - "grad_norm": 43.680885314941406, - "learning_rate": 7.217777777777778e-06, - "loss": 1.6042, - "step": 42200 - }, - { - "epoch": 1.35072, - "grad_norm": 
45.03094482421875, - "learning_rate": 7.214222222222222e-06, - "loss": 1.5914, - "step": 42210 - }, - { - "epoch": 1.35104, - "grad_norm": 44.120704650878906, - "learning_rate": 7.210666666666667e-06, - "loss": 1.622, - "step": 42220 - }, - { - "epoch": 1.3513600000000001, - "grad_norm": 44.486473083496094, - "learning_rate": 7.207111111111111e-06, - "loss": 1.6104, - "step": 42230 - }, - { - "epoch": 1.35168, - "grad_norm": 44.795597076416016, - "learning_rate": 7.203555555555556e-06, - "loss": 1.5942, - "step": 42240 - }, - { - "epoch": 1.3519999999999999, - "grad_norm": 45.60823059082031, - "learning_rate": 7.2000000000000005e-06, - "loss": 1.5909, - "step": 42250 - }, - { - "epoch": 1.35232, - "grad_norm": 45.53849792480469, - "learning_rate": 7.196444444444445e-06, - "loss": 1.6015, - "step": 42260 - }, - { - "epoch": 1.35264, - "grad_norm": 45.30690002441406, - "learning_rate": 7.19288888888889e-06, - "loss": 1.6008, - "step": 42270 - }, - { - "epoch": 1.35296, - "grad_norm": 45.725833892822266, - "learning_rate": 7.189333333333333e-06, - "loss": 1.6238, - "step": 42280 - }, - { - "epoch": 1.35328, - "grad_norm": 45.394039154052734, - "learning_rate": 7.185777777777778e-06, - "loss": 1.6399, - "step": 42290 - }, - { - "epoch": 1.3536000000000001, - "grad_norm": 43.31956481933594, - "learning_rate": 7.1822222222222224e-06, - "loss": 1.5954, - "step": 42300 - }, - { - "epoch": 1.35392, - "grad_norm": 45.152774810791016, - "learning_rate": 7.178666666666667e-06, - "loss": 1.6, - "step": 42310 - }, - { - "epoch": 1.3542399999999999, - "grad_norm": 45.98184585571289, - "learning_rate": 7.1751111111111116e-06, - "loss": 1.6067, - "step": 42320 - }, - { - "epoch": 1.35456, - "grad_norm": 44.53315734863281, - "learning_rate": 7.171555555555556e-06, - "loss": 1.6073, - "step": 42330 - }, - { - "epoch": 1.35488, - "grad_norm": 50.22261047363281, - "learning_rate": 7.168000000000001e-06, - "loss": 1.6198, - "step": 42340 - }, - { - "epoch": 1.3552, - "grad_norm": 44.62266159057617, - "learning_rate": 7.164444444444445e-06, - "loss": 1.623, - "step": 42350 - }, - { - "epoch": 1.35552, - "grad_norm": 44.818748474121094, - "learning_rate": 7.16088888888889e-06, - "loss": 1.5935, - "step": 42360 - }, - { - "epoch": 1.35584, - "grad_norm": 43.746421813964844, - "learning_rate": 7.157333333333334e-06, - "loss": 1.5749, - "step": 42370 - }, - { - "epoch": 1.35616, - "grad_norm": 46.356353759765625, - "learning_rate": 7.153777777777778e-06, - "loss": 1.6146, - "step": 42380 - }, - { - "epoch": 1.35648, - "grad_norm": 47.375152587890625, - "learning_rate": 7.150222222222223e-06, - "loss": 1.6118, - "step": 42390 - }, - { - "epoch": 1.3568, - "grad_norm": 46.35893630981445, - "learning_rate": 7.146666666666667e-06, - "loss": 1.6158, - "step": 42400 - }, - { - "epoch": 1.35712, - "grad_norm": 46.352848052978516, - "learning_rate": 7.143111111111112e-06, - "loss": 1.5878, - "step": 42410 - }, - { - "epoch": 1.35744, - "grad_norm": 43.320281982421875, - "learning_rate": 7.139555555555556e-06, - "loss": 1.6167, - "step": 42420 - }, - { - "epoch": 1.35776, - "grad_norm": 42.796791076660156, - "learning_rate": 7.136000000000001e-06, - "loss": 1.6257, - "step": 42430 - }, - { - "epoch": 1.35808, - "grad_norm": 45.10505294799805, - "learning_rate": 7.132444444444445e-06, - "loss": 1.5949, - "step": 42440 - }, - { - "epoch": 1.3584, - "grad_norm": 45.902618408203125, - "learning_rate": 7.12888888888889e-06, - "loss": 1.635, - "step": 42450 - }, - { - "epoch": 1.35872, - "grad_norm": 46.71749496459961, - 
"learning_rate": 7.1253333333333345e-06, - "loss": 1.6115, - "step": 42460 - }, - { - "epoch": 1.35904, - "grad_norm": 43.46259689331055, - "learning_rate": 7.121777777777778e-06, - "loss": 1.5951, - "step": 42470 - }, - { - "epoch": 1.3593600000000001, - "grad_norm": 47.766231536865234, - "learning_rate": 7.118222222222223e-06, - "loss": 1.6121, - "step": 42480 - }, - { - "epoch": 1.35968, - "grad_norm": 44.797122955322266, - "learning_rate": 7.114666666666667e-06, - "loss": 1.5854, - "step": 42490 - }, - { - "epoch": 1.3599999999999999, - "grad_norm": 45.8564567565918, - "learning_rate": 7.111111111111112e-06, - "loss": 1.6086, - "step": 42500 - }, - { - "epoch": 1.36032, - "grad_norm": 45.51469802856445, - "learning_rate": 7.107555555555556e-06, - "loss": 1.6422, - "step": 42510 - }, - { - "epoch": 1.36064, - "grad_norm": 45.435096740722656, - "learning_rate": 7.104000000000001e-06, - "loss": 1.6109, - "step": 42520 - }, - { - "epoch": 1.36096, - "grad_norm": 46.81600570678711, - "learning_rate": 7.1004444444444455e-06, - "loss": 1.6098, - "step": 42530 - }, - { - "epoch": 1.36128, - "grad_norm": 44.625938415527344, - "learning_rate": 7.09688888888889e-06, - "loss": 1.615, - "step": 42540 - }, - { - "epoch": 1.3616, - "grad_norm": 44.89677429199219, - "learning_rate": 7.093333333333335e-06, - "loss": 1.613, - "step": 42550 - }, - { - "epoch": 1.36192, - "grad_norm": 44.43601989746094, - "learning_rate": 7.089777777777779e-06, - "loss": 1.5775, - "step": 42560 - }, - { - "epoch": 1.36224, - "grad_norm": 45.43404006958008, - "learning_rate": 7.086222222222223e-06, - "loss": 1.6413, - "step": 42570 - }, - { - "epoch": 1.36256, - "grad_norm": 45.00292205810547, - "learning_rate": 7.082666666666667e-06, - "loss": 1.6518, - "step": 42580 - }, - { - "epoch": 1.36288, - "grad_norm": 46.85793685913086, - "learning_rate": 7.079111111111112e-06, - "loss": 1.5907, - "step": 42590 - }, - { - "epoch": 1.3632, - "grad_norm": 42.96330642700195, - "learning_rate": 7.0755555555555565e-06, - "loss": 1.6054, - "step": 42600 - }, - { - "epoch": 1.36352, - "grad_norm": 44.131099700927734, - "learning_rate": 7.072000000000001e-06, - "loss": 1.6187, - "step": 42610 - }, - { - "epoch": 1.36384, - "grad_norm": 44.26656723022461, - "learning_rate": 7.068444444444446e-06, - "loss": 1.6102, - "step": 42620 - }, - { - "epoch": 1.36416, - "grad_norm": 44.00492858886719, - "learning_rate": 7.06488888888889e-06, - "loss": 1.6255, - "step": 42630 - }, - { - "epoch": 1.36448, - "grad_norm": 45.8413200378418, - "learning_rate": 7.061333333333333e-06, - "loss": 1.5964, - "step": 42640 - }, - { - "epoch": 1.3648, - "grad_norm": 46.41115951538086, - "learning_rate": 7.057777777777778e-06, - "loss": 1.5851, - "step": 42650 - }, - { - "epoch": 1.3651200000000001, - "grad_norm": 45.85788345336914, - "learning_rate": 7.054222222222222e-06, - "loss": 1.6009, - "step": 42660 - }, - { - "epoch": 1.36544, - "grad_norm": 44.54059600830078, - "learning_rate": 7.050666666666667e-06, - "loss": 1.5774, - "step": 42670 - }, - { - "epoch": 1.3657599999999999, - "grad_norm": 43.62693786621094, - "learning_rate": 7.047111111111111e-06, - "loss": 1.6298, - "step": 42680 - }, - { - "epoch": 1.36608, - "grad_norm": 44.06049346923828, - "learning_rate": 7.043555555555556e-06, - "loss": 1.6006, - "step": 42690 - }, - { - "epoch": 1.3664, - "grad_norm": 45.65952682495117, - "learning_rate": 7.04e-06, - "loss": 1.6428, - "step": 42700 - }, - { - "epoch": 1.36672, - "grad_norm": 45.37552261352539, - "learning_rate": 7.036444444444445e-06, - "loss": 
1.6304, - "step": 42710 - }, - { - "epoch": 1.36704, - "grad_norm": 45.344573974609375, - "learning_rate": 7.0328888888888895e-06, - "loss": 1.5978, - "step": 42720 - }, - { - "epoch": 1.3673600000000001, - "grad_norm": 45.009918212890625, - "learning_rate": 7.029333333333333e-06, - "loss": 1.6036, - "step": 42730 - }, - { - "epoch": 1.36768, - "grad_norm": 47.78025436401367, - "learning_rate": 7.025777777777778e-06, - "loss": 1.6182, - "step": 42740 - }, - { - "epoch": 1.3679999999999999, - "grad_norm": 43.18815612792969, - "learning_rate": 7.022222222222222e-06, - "loss": 1.6248, - "step": 42750 - }, - { - "epoch": 1.36832, - "grad_norm": 44.154388427734375, - "learning_rate": 7.018666666666667e-06, - "loss": 1.5975, - "step": 42760 - }, - { - "epoch": 1.36864, - "grad_norm": 44.73991775512695, - "learning_rate": 7.015111111111111e-06, - "loss": 1.5802, - "step": 42770 - }, - { - "epoch": 1.36896, - "grad_norm": 45.32402801513672, - "learning_rate": 7.011555555555556e-06, - "loss": 1.6301, - "step": 42780 - }, - { - "epoch": 1.36928, - "grad_norm": 45.557464599609375, - "learning_rate": 7.0080000000000005e-06, - "loss": 1.6012, - "step": 42790 - }, - { - "epoch": 1.3696, - "grad_norm": 44.68947982788086, - "learning_rate": 7.004444444444445e-06, - "loss": 1.5771, - "step": 42800 - }, - { - "epoch": 1.36992, - "grad_norm": 47.947853088378906, - "learning_rate": 7.00088888888889e-06, - "loss": 1.6116, - "step": 42810 - }, - { - "epoch": 1.37024, - "grad_norm": 46.1669807434082, - "learning_rate": 6.997333333333334e-06, - "loss": 1.6344, - "step": 42820 - }, - { - "epoch": 1.37056, - "grad_norm": 42.729087829589844, - "learning_rate": 6.993777777777778e-06, - "loss": 1.6033, - "step": 42830 - }, - { - "epoch": 1.37088, - "grad_norm": 43.983116149902344, - "learning_rate": 6.990222222222222e-06, - "loss": 1.5901, - "step": 42840 - }, - { - "epoch": 1.3712, - "grad_norm": 47.0743408203125, - "learning_rate": 6.986666666666667e-06, - "loss": 1.5971, - "step": 42850 - }, - { - "epoch": 1.37152, - "grad_norm": 45.89794158935547, - "learning_rate": 6.9831111111111115e-06, - "loss": 1.5891, - "step": 42860 - }, - { - "epoch": 1.37184, - "grad_norm": 47.009464263916016, - "learning_rate": 6.979555555555556e-06, - "loss": 1.624, - "step": 42870 - }, - { - "epoch": 1.37216, - "grad_norm": 44.28515625, - "learning_rate": 6.976000000000001e-06, - "loss": 1.6088, - "step": 42880 - }, - { - "epoch": 1.37248, - "grad_norm": 46.215370178222656, - "learning_rate": 6.972444444444445e-06, - "loss": 1.5858, - "step": 42890 - }, - { - "epoch": 1.3728, - "grad_norm": 43.991416931152344, - "learning_rate": 6.96888888888889e-06, - "loss": 1.5814, - "step": 42900 - }, - { - "epoch": 1.3731200000000001, - "grad_norm": 46.066654205322266, - "learning_rate": 6.965333333333334e-06, - "loss": 1.6109, - "step": 42910 - }, - { - "epoch": 1.37344, - "grad_norm": 44.713748931884766, - "learning_rate": 6.961777777777778e-06, - "loss": 1.5892, - "step": 42920 - }, - { - "epoch": 1.3737599999999999, - "grad_norm": 45.68426513671875, - "learning_rate": 6.9582222222222226e-06, - "loss": 1.5985, - "step": 42930 - }, - { - "epoch": 1.37408, - "grad_norm": 44.62791442871094, - "learning_rate": 6.954666666666667e-06, - "loss": 1.5908, - "step": 42940 - }, - { - "epoch": 1.3744, - "grad_norm": 44.590370178222656, - "learning_rate": 6.951111111111112e-06, - "loss": 1.5942, - "step": 42950 - }, - { - "epoch": 1.37472, - "grad_norm": 46.614261627197266, - "learning_rate": 6.947555555555556e-06, - "loss": 1.6259, - "step": 42960 - }, - { 
- "epoch": 1.37504, - "grad_norm": 45.88407897949219, - "learning_rate": 6.944000000000001e-06, - "loss": 1.6235, - "step": 42970 - }, - { - "epoch": 1.3753600000000001, - "grad_norm": 46.90294647216797, - "learning_rate": 6.940444444444445e-06, - "loss": 1.5938, - "step": 42980 - }, - { - "epoch": 1.37568, - "grad_norm": 44.70612716674805, - "learning_rate": 6.93688888888889e-06, - "loss": 1.605, - "step": 42990 - }, - { - "epoch": 1.376, - "grad_norm": 43.3810920715332, - "learning_rate": 6.9333333333333344e-06, - "loss": 1.6153, - "step": 43000 - }, - { - "epoch": 1.37632, - "grad_norm": 43.62282180786133, - "learning_rate": 6.929777777777779e-06, - "loss": 1.5724, - "step": 43010 - }, - { - "epoch": 1.37664, - "grad_norm": 44.38874816894531, - "learning_rate": 6.926222222222223e-06, - "loss": 1.637, - "step": 43020 - }, - { - "epoch": 1.37696, - "grad_norm": 44.056522369384766, - "learning_rate": 6.922666666666667e-06, - "loss": 1.6198, - "step": 43030 - }, - { - "epoch": 1.37728, - "grad_norm": 44.81947326660156, - "learning_rate": 6.919111111111112e-06, - "loss": 1.6117, - "step": 43040 - }, - { - "epoch": 1.3776, - "grad_norm": 43.83502197265625, - "learning_rate": 6.915555555555556e-06, - "loss": 1.5649, - "step": 43050 - }, - { - "epoch": 1.37792, - "grad_norm": 44.356773376464844, - "learning_rate": 6.912000000000001e-06, - "loss": 1.6297, - "step": 43060 - }, - { - "epoch": 1.37824, - "grad_norm": 48.54316711425781, - "learning_rate": 6.9084444444444455e-06, - "loss": 1.6002, - "step": 43070 - }, - { - "epoch": 1.37856, - "grad_norm": 45.56771469116211, - "learning_rate": 6.90488888888889e-06, - "loss": 1.6219, - "step": 43080 - }, - { - "epoch": 1.37888, - "grad_norm": 43.97990036010742, - "learning_rate": 6.9013333333333346e-06, - "loss": 1.6474, - "step": 43090 - }, - { - "epoch": 1.3792, - "grad_norm": 44.80003356933594, - "learning_rate": 6.897777777777779e-06, - "loss": 1.5931, - "step": 43100 - }, - { - "epoch": 1.37952, - "grad_norm": 43.22229766845703, - "learning_rate": 6.894222222222224e-06, - "loss": 1.5845, - "step": 43110 - }, - { - "epoch": 1.37984, - "grad_norm": 45.73298263549805, - "learning_rate": 6.890666666666667e-06, - "loss": 1.6028, - "step": 43120 - }, - { - "epoch": 1.38016, - "grad_norm": 44.59761047363281, - "learning_rate": 6.887111111111112e-06, - "loss": 1.5751, - "step": 43130 - }, - { - "epoch": 1.38048, - "grad_norm": 45.15092849731445, - "learning_rate": 6.8835555555555565e-06, - "loss": 1.6216, - "step": 43140 - }, - { - "epoch": 1.3808, - "grad_norm": 45.5977897644043, - "learning_rate": 6.88e-06, - "loss": 1.6063, - "step": 43150 - }, - { - "epoch": 1.3811200000000001, - "grad_norm": 43.23210525512695, - "learning_rate": 6.876444444444445e-06, - "loss": 1.6057, - "step": 43160 - }, - { - "epoch": 1.38144, - "grad_norm": 44.85715866088867, - "learning_rate": 6.872888888888889e-06, - "loss": 1.6179, - "step": 43170 - }, - { - "epoch": 1.3817599999999999, - "grad_norm": 45.367855072021484, - "learning_rate": 6.869333333333333e-06, - "loss": 1.5685, - "step": 43180 - }, - { - "epoch": 1.38208, - "grad_norm": 43.84122085571289, - "learning_rate": 6.8657777777777776e-06, - "loss": 1.6033, - "step": 43190 - }, - { - "epoch": 1.3824, - "grad_norm": 44.58828353881836, - "learning_rate": 6.862222222222222e-06, - "loss": 1.6334, - "step": 43200 - }, - { - "epoch": 1.38272, - "grad_norm": 45.26247787475586, - "learning_rate": 6.858666666666667e-06, - "loss": 1.6057, - "step": 43210 - }, - { - "epoch": 1.38304, - "grad_norm": 42.196754455566406, - 
"learning_rate": 6.855111111111111e-06, - "loss": 1.6067, - "step": 43220 - }, - { - "epoch": 1.38336, - "grad_norm": 47.0052375793457, - "learning_rate": 6.851555555555556e-06, - "loss": 1.6125, - "step": 43230 - }, - { - "epoch": 1.38368, - "grad_norm": 44.26320266723633, - "learning_rate": 6.848e-06, - "loss": 1.6237, - "step": 43240 - }, - { - "epoch": 1.384, - "grad_norm": 44.31113052368164, - "learning_rate": 6.844444444444445e-06, - "loss": 1.6111, - "step": 43250 - }, - { - "epoch": 1.38432, - "grad_norm": 45.720027923583984, - "learning_rate": 6.8408888888888894e-06, - "loss": 1.5989, - "step": 43260 - }, - { - "epoch": 1.38464, - "grad_norm": 46.495758056640625, - "learning_rate": 6.837333333333334e-06, - "loss": 1.605, - "step": 43270 - }, - { - "epoch": 1.38496, - "grad_norm": 45.887367248535156, - "learning_rate": 6.833777777777778e-06, - "loss": 1.5901, - "step": 43280 - }, - { - "epoch": 1.38528, - "grad_norm": 44.445804595947266, - "learning_rate": 6.830222222222222e-06, - "loss": 1.5793, - "step": 43290 - }, - { - "epoch": 1.3856, - "grad_norm": 46.46500778198242, - "learning_rate": 6.826666666666667e-06, - "loss": 1.6001, - "step": 43300 - }, - { - "epoch": 1.38592, - "grad_norm": 44.54768753051758, - "learning_rate": 6.823111111111111e-06, - "loss": 1.6045, - "step": 43310 - }, - { - "epoch": 1.38624, - "grad_norm": 45.025169372558594, - "learning_rate": 6.819555555555556e-06, - "loss": 1.6225, - "step": 43320 - }, - { - "epoch": 1.38656, - "grad_norm": 44.17384719848633, - "learning_rate": 6.8160000000000005e-06, - "loss": 1.576, - "step": 43330 - }, - { - "epoch": 1.3868800000000001, - "grad_norm": 44.44326400756836, - "learning_rate": 6.812444444444445e-06, - "loss": 1.5932, - "step": 43340 - }, - { - "epoch": 1.3872, - "grad_norm": 45.172157287597656, - "learning_rate": 6.80888888888889e-06, - "loss": 1.5878, - "step": 43350 - }, - { - "epoch": 1.3875199999999999, - "grad_norm": 45.87239074707031, - "learning_rate": 6.805333333333334e-06, - "loss": 1.6009, - "step": 43360 - }, - { - "epoch": 1.38784, - "grad_norm": 44.63097381591797, - "learning_rate": 6.801777777777778e-06, - "loss": 1.6016, - "step": 43370 - }, - { - "epoch": 1.38816, - "grad_norm": 43.94593811035156, - "learning_rate": 6.798222222222222e-06, - "loss": 1.5834, - "step": 43380 - }, - { - "epoch": 1.38848, - "grad_norm": 46.12236785888672, - "learning_rate": 6.794666666666667e-06, - "loss": 1.5865, - "step": 43390 - }, - { - "epoch": 1.3888, - "grad_norm": 45.60383605957031, - "learning_rate": 6.7911111111111115e-06, - "loss": 1.5848, - "step": 43400 - }, - { - "epoch": 1.3891200000000001, - "grad_norm": 45.66219711303711, - "learning_rate": 6.787555555555556e-06, - "loss": 1.6455, - "step": 43410 - }, - { - "epoch": 1.38944, - "grad_norm": 43.15108108520508, - "learning_rate": 6.784000000000001e-06, - "loss": 1.6232, - "step": 43420 - }, - { - "epoch": 1.3897599999999999, - "grad_norm": 44.42317581176758, - "learning_rate": 6.780444444444445e-06, - "loss": 1.6398, - "step": 43430 - }, - { - "epoch": 1.39008, - "grad_norm": 46.42605972290039, - "learning_rate": 6.77688888888889e-06, - "loss": 1.6111, - "step": 43440 - }, - { - "epoch": 1.3904, - "grad_norm": 44.38256072998047, - "learning_rate": 6.773333333333334e-06, - "loss": 1.5726, - "step": 43450 - }, - { - "epoch": 1.39072, - "grad_norm": 45.85690689086914, - "learning_rate": 6.769777777777779e-06, - "loss": 1.6, - "step": 43460 - }, - { - "epoch": 1.39104, - "grad_norm": 44.399715423583984, - "learning_rate": 6.7662222222222225e-06, - "loss": 
1.6328, - "step": 43470 - }, - { - "epoch": 1.39136, - "grad_norm": 45.213165283203125, - "learning_rate": 6.762666666666667e-06, - "loss": 1.6013, - "step": 43480 - }, - { - "epoch": 1.39168, - "grad_norm": 44.65245819091797, - "learning_rate": 6.759111111111112e-06, - "loss": 1.6072, - "step": 43490 - }, - { - "epoch": 1.392, - "grad_norm": 47.13508987426758, - "learning_rate": 6.755555555555556e-06, - "loss": 1.6562, - "step": 43500 - }, - { - "epoch": 1.39232, - "grad_norm": 46.34877395629883, - "learning_rate": 6.752000000000001e-06, - "loss": 1.6082, - "step": 43510 - }, - { - "epoch": 1.39264, - "grad_norm": 48.1592903137207, - "learning_rate": 6.748444444444445e-06, - "loss": 1.6268, - "step": 43520 - }, - { - "epoch": 1.39296, - "grad_norm": 43.81608581542969, - "learning_rate": 6.74488888888889e-06, - "loss": 1.5885, - "step": 43530 - }, - { - "epoch": 1.39328, - "grad_norm": 44.738468170166016, - "learning_rate": 6.741333333333334e-06, - "loss": 1.6022, - "step": 43540 - }, - { - "epoch": 1.3936, - "grad_norm": 45.94084930419922, - "learning_rate": 6.737777777777779e-06, - "loss": 1.6218, - "step": 43550 - }, - { - "epoch": 1.39392, - "grad_norm": 43.44023132324219, - "learning_rate": 6.7342222222222235e-06, - "loss": 1.6135, - "step": 43560 - }, - { - "epoch": 1.39424, - "grad_norm": 45.58948516845703, - "learning_rate": 6.730666666666667e-06, - "loss": 1.598, - "step": 43570 - }, - { - "epoch": 1.39456, - "grad_norm": 48.460453033447266, - "learning_rate": 6.727111111111112e-06, - "loss": 1.6107, - "step": 43580 - }, - { - "epoch": 1.3948800000000001, - "grad_norm": 43.78839111328125, - "learning_rate": 6.723555555555556e-06, - "loss": 1.5936, - "step": 43590 - }, - { - "epoch": 1.3952, - "grad_norm": 43.97420883178711, - "learning_rate": 6.720000000000001e-06, - "loss": 1.5876, - "step": 43600 - }, - { - "epoch": 1.3955199999999999, - "grad_norm": 46.42491149902344, - "learning_rate": 6.7164444444444454e-06, - "loss": 1.5972, - "step": 43610 - }, - { - "epoch": 1.39584, - "grad_norm": 45.202362060546875, - "learning_rate": 6.71288888888889e-06, - "loss": 1.6245, - "step": 43620 - }, - { - "epoch": 1.39616, - "grad_norm": 44.71420669555664, - "learning_rate": 6.7093333333333345e-06, - "loss": 1.6262, - "step": 43630 - }, - { - "epoch": 1.39648, - "grad_norm": 45.896907806396484, - "learning_rate": 6.705777777777779e-06, - "loss": 1.5995, - "step": 43640 - }, - { - "epoch": 1.3968, - "grad_norm": 44.3741455078125, - "learning_rate": 6.702222222222224e-06, - "loss": 1.6059, - "step": 43650 - }, - { - "epoch": 1.39712, - "grad_norm": 43.47319412231445, - "learning_rate": 6.698666666666667e-06, - "loss": 1.588, - "step": 43660 - }, - { - "epoch": 1.39744, - "grad_norm": 45.12184143066406, - "learning_rate": 6.695111111111111e-06, - "loss": 1.5827, - "step": 43670 - }, - { - "epoch": 1.39776, - "grad_norm": 46.409576416015625, - "learning_rate": 6.691555555555556e-06, - "loss": 1.617, - "step": 43680 - }, - { - "epoch": 1.39808, - "grad_norm": 45.21237564086914, - "learning_rate": 6.688e-06, - "loss": 1.627, - "step": 43690 - }, - { - "epoch": 1.3984, - "grad_norm": 45.58821105957031, - "learning_rate": 6.684444444444445e-06, - "loss": 1.6018, - "step": 43700 - }, - { - "epoch": 1.39872, - "grad_norm": 45.74357604980469, - "learning_rate": 6.680888888888889e-06, - "loss": 1.6084, - "step": 43710 - }, - { - "epoch": 1.39904, - "grad_norm": 44.05308151245117, - "learning_rate": 6.677333333333334e-06, - "loss": 1.5947, - "step": 43720 - }, - { - "epoch": 1.39936, - "grad_norm": 
45.306331634521484, - "learning_rate": 6.6737777777777775e-06, - "loss": 1.6247, - "step": 43730 - }, - { - "epoch": 1.39968, - "grad_norm": 44.71645736694336, - "learning_rate": 6.670222222222222e-06, - "loss": 1.6086, - "step": 43740 - }, - { - "epoch": 1.4, - "grad_norm": 43.80806350708008, - "learning_rate": 6.666666666666667e-06, - "loss": 1.6023, - "step": 43750 - }, - { - "epoch": 1.40032, - "grad_norm": 44.42367172241211, - "learning_rate": 6.663111111111111e-06, - "loss": 1.557, - "step": 43760 - }, - { - "epoch": 1.40064, - "grad_norm": 44.97092056274414, - "learning_rate": 6.659555555555556e-06, - "loss": 1.6089, - "step": 43770 - }, - { - "epoch": 1.40096, - "grad_norm": 43.39394760131836, - "learning_rate": 6.656e-06, - "loss": 1.6084, - "step": 43780 - }, - { - "epoch": 1.40128, - "grad_norm": 47.56688690185547, - "learning_rate": 6.652444444444445e-06, - "loss": 1.5989, - "step": 43790 - }, - { - "epoch": 1.4016, - "grad_norm": 44.7179069519043, - "learning_rate": 6.648888888888889e-06, - "loss": 1.5982, - "step": 43800 - }, - { - "epoch": 1.40192, - "grad_norm": 44.70976257324219, - "learning_rate": 6.645333333333334e-06, - "loss": 1.6146, - "step": 43810 - }, - { - "epoch": 1.40224, - "grad_norm": 43.4413948059082, - "learning_rate": 6.6417777777777785e-06, - "loss": 1.6083, - "step": 43820 - }, - { - "epoch": 1.40256, - "grad_norm": 44.52791976928711, - "learning_rate": 6.638222222222222e-06, - "loss": 1.5993, - "step": 43830 - }, - { - "epoch": 1.4028800000000001, - "grad_norm": 44.93545150756836, - "learning_rate": 6.634666666666667e-06, - "loss": 1.5721, - "step": 43840 - }, - { - "epoch": 1.4032, - "grad_norm": 46.78096389770508, - "learning_rate": 6.631111111111111e-06, - "loss": 1.6, - "step": 43850 - }, - { - "epoch": 1.4035199999999999, - "grad_norm": 49.197288513183594, - "learning_rate": 6.627555555555556e-06, - "loss": 1.6161, - "step": 43860 - }, - { - "epoch": 1.40384, - "grad_norm": 45.466251373291016, - "learning_rate": 6.6240000000000004e-06, - "loss": 1.6268, - "step": 43870 - }, - { - "epoch": 1.40416, - "grad_norm": 45.11639404296875, - "learning_rate": 6.620444444444445e-06, - "loss": 1.5927, - "step": 43880 - }, - { - "epoch": 1.40448, - "grad_norm": 43.47707748413086, - "learning_rate": 6.6168888888888896e-06, - "loss": 1.6192, - "step": 43890 - }, - { - "epoch": 1.4048, - "grad_norm": 43.626075744628906, - "learning_rate": 6.613333333333334e-06, - "loss": 1.588, - "step": 43900 - }, - { - "epoch": 1.40512, - "grad_norm": 43.15353775024414, - "learning_rate": 6.609777777777779e-06, - "loss": 1.6191, - "step": 43910 - }, - { - "epoch": 1.40544, - "grad_norm": 47.36770248413086, - "learning_rate": 6.606222222222222e-06, - "loss": 1.6061, - "step": 43920 - }, - { - "epoch": 1.40576, - "grad_norm": 44.976531982421875, - "learning_rate": 6.602666666666667e-06, - "loss": 1.6152, - "step": 43930 - }, - { - "epoch": 1.40608, - "grad_norm": 44.371978759765625, - "learning_rate": 6.5991111111111115e-06, - "loss": 1.5464, - "step": 43940 - }, - { - "epoch": 1.4064, - "grad_norm": 43.97093200683594, - "learning_rate": 6.595555555555556e-06, - "loss": 1.5851, - "step": 43950 - }, - { - "epoch": 1.40672, - "grad_norm": 43.370418548583984, - "learning_rate": 6.592000000000001e-06, - "loss": 1.5712, - "step": 43960 - }, - { - "epoch": 1.40704, - "grad_norm": 45.69548797607422, - "learning_rate": 6.588444444444445e-06, - "loss": 1.5918, - "step": 43970 - }, - { - "epoch": 1.40736, - "grad_norm": 44.62795639038086, - "learning_rate": 6.58488888888889e-06, - "loss": 
1.569, - "step": 43980 - }, - { - "epoch": 1.40768, - "grad_norm": 45.75224304199219, - "learning_rate": 6.581333333333334e-06, - "loss": 1.6054, - "step": 43990 - }, - { - "epoch": 1.408, - "grad_norm": 45.715789794921875, - "learning_rate": 6.577777777777779e-06, - "loss": 1.6067, - "step": 44000 - }, - { - "epoch": 1.40832, - "grad_norm": 44.8160285949707, - "learning_rate": 6.574222222222223e-06, - "loss": 1.6198, - "step": 44010 - }, - { - "epoch": 1.4086400000000001, - "grad_norm": 46.13142395019531, - "learning_rate": 6.570666666666667e-06, - "loss": 1.6129, - "step": 44020 - }, - { - "epoch": 1.40896, - "grad_norm": 45.89512634277344, - "learning_rate": 6.567111111111112e-06, - "loss": 1.6048, - "step": 44030 - }, - { - "epoch": 1.4092799999999999, - "grad_norm": 48.633846282958984, - "learning_rate": 6.563555555555556e-06, - "loss": 1.6084, - "step": 44040 - }, - { - "epoch": 1.4096, - "grad_norm": 45.565189361572266, - "learning_rate": 6.560000000000001e-06, - "loss": 1.5784, - "step": 44050 - }, - { - "epoch": 1.40992, - "grad_norm": 44.69120407104492, - "learning_rate": 6.556444444444445e-06, - "loss": 1.5901, - "step": 44060 - }, - { - "epoch": 1.41024, - "grad_norm": 44.379817962646484, - "learning_rate": 6.55288888888889e-06, - "loss": 1.5949, - "step": 44070 - }, - { - "epoch": 1.41056, - "grad_norm": 47.116477966308594, - "learning_rate": 6.549333333333334e-06, - "loss": 1.6227, - "step": 44080 - }, - { - "epoch": 1.4108800000000001, - "grad_norm": 44.753299713134766, - "learning_rate": 6.545777777777779e-06, - "loss": 1.5981, - "step": 44090 - }, - { - "epoch": 1.4112, - "grad_norm": 44.2072639465332, - "learning_rate": 6.5422222222222235e-06, - "loss": 1.6162, - "step": 44100 - }, - { - "epoch": 1.4115199999999999, - "grad_norm": 47.168739318847656, - "learning_rate": 6.538666666666667e-06, - "loss": 1.5781, - "step": 44110 - }, - { - "epoch": 1.41184, - "grad_norm": 47.949363708496094, - "learning_rate": 6.535111111111112e-06, - "loss": 1.6039, - "step": 44120 - }, - { - "epoch": 1.41216, - "grad_norm": 45.053653717041016, - "learning_rate": 6.531555555555556e-06, - "loss": 1.6081, - "step": 44130 - }, - { - "epoch": 1.41248, - "grad_norm": 44.90990447998047, - "learning_rate": 6.528000000000001e-06, - "loss": 1.5493, - "step": 44140 - }, - { - "epoch": 1.4128, - "grad_norm": 46.25327682495117, - "learning_rate": 6.524444444444445e-06, - "loss": 1.6253, - "step": 44150 - }, - { - "epoch": 1.41312, - "grad_norm": 43.844017028808594, - "learning_rate": 6.52088888888889e-06, - "loss": 1.5917, - "step": 44160 - }, - { - "epoch": 1.41344, - "grad_norm": 44.53428268432617, - "learning_rate": 6.5173333333333345e-06, - "loss": 1.5935, - "step": 44170 - }, - { - "epoch": 1.41376, - "grad_norm": 46.895931243896484, - "learning_rate": 6.513777777777779e-06, - "loss": 1.6101, - "step": 44180 - }, - { - "epoch": 1.41408, - "grad_norm": 45.996299743652344, - "learning_rate": 6.510222222222222e-06, - "loss": 1.6097, - "step": 44190 - }, - { - "epoch": 1.4144, - "grad_norm": 45.133392333984375, - "learning_rate": 6.5066666666666665e-06, - "loss": 1.615, - "step": 44200 - }, - { - "epoch": 1.41472, - "grad_norm": 44.23000717163086, - "learning_rate": 6.503111111111111e-06, - "loss": 1.5988, - "step": 44210 - }, - { - "epoch": 1.41504, - "grad_norm": 44.77185821533203, - "learning_rate": 6.499555555555556e-06, - "loss": 1.5761, - "step": 44220 - }, - { - "epoch": 1.41536, - "grad_norm": 43.65971374511719, - "learning_rate": 6.496e-06, - "loss": 1.5773, - "step": 44230 - }, - { - "epoch": 
1.41568, - "grad_norm": 44.112850189208984, - "learning_rate": 6.492444444444445e-06, - "loss": 1.6143, - "step": 44240 - }, - { - "epoch": 1.416, - "grad_norm": 44.179534912109375, - "learning_rate": 6.488888888888889e-06, - "loss": 1.6028, - "step": 44250 - }, - { - "epoch": 1.41632, - "grad_norm": 44.71501922607422, - "learning_rate": 6.485333333333334e-06, - "loss": 1.6185, - "step": 44260 - }, - { - "epoch": 1.4166400000000001, - "grad_norm": 44.295318603515625, - "learning_rate": 6.481777777777778e-06, - "loss": 1.575, - "step": 44270 - }, - { - "epoch": 1.41696, - "grad_norm": 43.224884033203125, - "learning_rate": 6.478222222222222e-06, - "loss": 1.5957, - "step": 44280 - }, - { - "epoch": 1.4172799999999999, - "grad_norm": 43.866905212402344, - "learning_rate": 6.474666666666667e-06, - "loss": 1.6108, - "step": 44290 - }, - { - "epoch": 1.4176, - "grad_norm": 46.90814208984375, - "learning_rate": 6.471111111111111e-06, - "loss": 1.609, - "step": 44300 - }, - { - "epoch": 1.41792, - "grad_norm": 45.48236083984375, - "learning_rate": 6.467555555555556e-06, - "loss": 1.5964, - "step": 44310 - }, - { - "epoch": 1.41824, - "grad_norm": 46.96796417236328, - "learning_rate": 6.464e-06, - "loss": 1.6068, - "step": 44320 - }, - { - "epoch": 1.41856, - "grad_norm": 43.96994400024414, - "learning_rate": 6.460444444444445e-06, - "loss": 1.5838, - "step": 44330 - }, - { - "epoch": 1.41888, - "grad_norm": 45.90093231201172, - "learning_rate": 6.456888888888889e-06, - "loss": 1.611, - "step": 44340 - }, - { - "epoch": 1.4192, - "grad_norm": 45.67726516723633, - "learning_rate": 6.453333333333334e-06, - "loss": 1.6252, - "step": 44350 - }, - { - "epoch": 1.41952, - "grad_norm": 46.475929260253906, - "learning_rate": 6.4497777777777785e-06, - "loss": 1.5988, - "step": 44360 - }, - { - "epoch": 1.41984, - "grad_norm": 46.8716926574707, - "learning_rate": 6.446222222222222e-06, - "loss": 1.6138, - "step": 44370 - }, - { - "epoch": 1.42016, - "grad_norm": 44.12192916870117, - "learning_rate": 6.442666666666667e-06, - "loss": 1.6223, - "step": 44380 - }, - { - "epoch": 1.42048, - "grad_norm": 43.43100357055664, - "learning_rate": 6.439111111111111e-06, - "loss": 1.6221, - "step": 44390 - }, - { - "epoch": 1.4208, - "grad_norm": 45.15028381347656, - "learning_rate": 6.435555555555556e-06, - "loss": 1.6043, - "step": 44400 - }, - { - "epoch": 1.42112, - "grad_norm": 46.52452087402344, - "learning_rate": 6.432e-06, - "loss": 1.6164, - "step": 44410 - }, - { - "epoch": 1.42144, - "grad_norm": 45.797088623046875, - "learning_rate": 6.428444444444445e-06, - "loss": 1.5884, - "step": 44420 - }, - { - "epoch": 1.42176, - "grad_norm": 46.37058639526367, - "learning_rate": 6.4248888888888895e-06, - "loss": 1.567, - "step": 44430 - }, - { - "epoch": 1.42208, - "grad_norm": 43.526065826416016, - "learning_rate": 6.421333333333334e-06, - "loss": 1.6117, - "step": 44440 - }, - { - "epoch": 1.4224, - "grad_norm": 43.67570114135742, - "learning_rate": 6.417777777777779e-06, - "loss": 1.6155, - "step": 44450 - }, - { - "epoch": 1.42272, - "grad_norm": 44.37187576293945, - "learning_rate": 6.414222222222223e-06, - "loss": 1.594, - "step": 44460 - }, - { - "epoch": 1.42304, - "grad_norm": 45.66731262207031, - "learning_rate": 6.410666666666667e-06, - "loss": 1.6081, - "step": 44470 - }, - { - "epoch": 1.42336, - "grad_norm": 46.02083206176758, - "learning_rate": 6.4071111111111114e-06, - "loss": 1.6136, - "step": 44480 - }, - { - "epoch": 1.42368, - "grad_norm": 44.25553894042969, - "learning_rate": 
6.403555555555556e-06, - "loss": 1.6016, - "step": 44490 - }, - { - "epoch": 1.424, - "grad_norm": 44.574031829833984, - "learning_rate": 6.4000000000000006e-06, - "loss": 1.6203, - "step": 44500 - }, - { - "epoch": 1.42432, - "grad_norm": 45.1065788269043, - "learning_rate": 6.396444444444445e-06, - "loss": 1.6158, - "step": 44510 - }, - { - "epoch": 1.4246400000000001, - "grad_norm": 45.92455291748047, - "learning_rate": 6.39288888888889e-06, - "loss": 1.6105, - "step": 44520 - }, - { - "epoch": 1.42496, - "grad_norm": 45.41129684448242, - "learning_rate": 6.389333333333334e-06, - "loss": 1.6069, - "step": 44530 - }, - { - "epoch": 1.4252799999999999, - "grad_norm": 45.73555374145508, - "learning_rate": 6.385777777777779e-06, - "loss": 1.6026, - "step": 44540 - }, - { - "epoch": 1.4256, - "grad_norm": 44.59724426269531, - "learning_rate": 6.382222222222223e-06, - "loss": 1.6014, - "step": 44550 - }, - { - "epoch": 1.42592, - "grad_norm": 46.82771301269531, - "learning_rate": 6.378666666666667e-06, - "loss": 1.6057, - "step": 44560 - }, - { - "epoch": 1.42624, - "grad_norm": 43.83695602416992, - "learning_rate": 6.375111111111112e-06, - "loss": 1.6141, - "step": 44570 - }, - { - "epoch": 1.42656, - "grad_norm": 42.86970138549805, - "learning_rate": 6.371555555555556e-06, - "loss": 1.6237, - "step": 44580 - }, - { - "epoch": 1.42688, - "grad_norm": 43.6492919921875, - "learning_rate": 6.368000000000001e-06, - "loss": 1.6163, - "step": 44590 - }, - { - "epoch": 1.4272, - "grad_norm": 42.75118637084961, - "learning_rate": 6.364444444444445e-06, - "loss": 1.5957, - "step": 44600 - }, - { - "epoch": 1.42752, - "grad_norm": 45.4508056640625, - "learning_rate": 6.36088888888889e-06, - "loss": 1.5855, - "step": 44610 - }, - { - "epoch": 1.42784, - "grad_norm": 45.386810302734375, - "learning_rate": 6.357333333333334e-06, - "loss": 1.5955, - "step": 44620 - }, - { - "epoch": 1.42816, - "grad_norm": 47.42978286743164, - "learning_rate": 6.353777777777779e-06, - "loss": 1.6036, - "step": 44630 - }, - { - "epoch": 1.42848, - "grad_norm": 44.261627197265625, - "learning_rate": 6.3502222222222235e-06, - "loss": 1.589, - "step": 44640 - }, - { - "epoch": 1.4288, - "grad_norm": 42.385276794433594, - "learning_rate": 6.346666666666668e-06, - "loss": 1.6108, - "step": 44650 - }, - { - "epoch": 1.42912, - "grad_norm": 47.136287689208984, - "learning_rate": 6.343111111111112e-06, - "loss": 1.6159, - "step": 44660 - }, - { - "epoch": 1.42944, - "grad_norm": 47.89680480957031, - "learning_rate": 6.339555555555556e-06, - "loss": 1.6096, - "step": 44670 - }, - { - "epoch": 1.42976, - "grad_norm": 43.50481414794922, - "learning_rate": 6.336000000000001e-06, - "loss": 1.603, - "step": 44680 - }, - { - "epoch": 1.43008, - "grad_norm": 44.940738677978516, - "learning_rate": 6.332444444444445e-06, - "loss": 1.605, - "step": 44690 - }, - { - "epoch": 1.4304000000000001, - "grad_norm": 45.36077117919922, - "learning_rate": 6.328888888888889e-06, - "loss": 1.5932, - "step": 44700 - }, - { - "epoch": 1.43072, - "grad_norm": 44.730690002441406, - "learning_rate": 6.325333333333334e-06, - "loss": 1.6017, - "step": 44710 - }, - { - "epoch": 1.4310399999999999, - "grad_norm": 43.576080322265625, - "learning_rate": 6.321777777777778e-06, - "loss": 1.6213, - "step": 44720 - }, - { - "epoch": 1.43136, - "grad_norm": 44.608856201171875, - "learning_rate": 6.318222222222222e-06, - "loss": 1.5938, - "step": 44730 - }, - { - "epoch": 1.43168, - "grad_norm": 47.434425354003906, - "learning_rate": 6.3146666666666665e-06, - "loss": 
1.6161, - "step": 44740 - }, - { - "epoch": 1.432, - "grad_norm": 44.625858306884766, - "learning_rate": 6.311111111111111e-06, - "loss": 1.6059, - "step": 44750 - }, - { - "epoch": 1.43232, - "grad_norm": 44.30402374267578, - "learning_rate": 6.3075555555555556e-06, - "loss": 1.5754, - "step": 44760 - }, - { - "epoch": 1.4326400000000001, - "grad_norm": 44.90681838989258, - "learning_rate": 6.304e-06, - "loss": 1.5993, - "step": 44770 - }, - { - "epoch": 1.43296, - "grad_norm": 42.78556442260742, - "learning_rate": 6.300444444444445e-06, - "loss": 1.5762, - "step": 44780 - }, - { - "epoch": 1.4332799999999999, - "grad_norm": 44.31805419921875, - "learning_rate": 6.296888888888889e-06, - "loss": 1.5601, - "step": 44790 - }, - { - "epoch": 1.4336, - "grad_norm": 46.09400939941406, - "learning_rate": 6.293333333333334e-06, - "loss": 1.5807, - "step": 44800 - }, - { - "epoch": 1.43392, - "grad_norm": 43.824424743652344, - "learning_rate": 6.289777777777778e-06, - "loss": 1.5881, - "step": 44810 - }, - { - "epoch": 1.43424, - "grad_norm": 44.83427047729492, - "learning_rate": 6.286222222222222e-06, - "loss": 1.6233, - "step": 44820 - }, - { - "epoch": 1.43456, - "grad_norm": 45.876094818115234, - "learning_rate": 6.282666666666667e-06, - "loss": 1.6087, - "step": 44830 - }, - { - "epoch": 1.43488, - "grad_norm": 48.019325256347656, - "learning_rate": 6.279111111111111e-06, - "loss": 1.6231, - "step": 44840 - }, - { - "epoch": 1.4352, - "grad_norm": 44.39468765258789, - "learning_rate": 6.275555555555556e-06, - "loss": 1.6105, - "step": 44850 - }, - { - "epoch": 1.43552, - "grad_norm": 43.1924934387207, - "learning_rate": 6.272e-06, - "loss": 1.6201, - "step": 44860 - }, - { - "epoch": 1.43584, - "grad_norm": 45.5162239074707, - "learning_rate": 6.268444444444445e-06, - "loss": 1.6036, - "step": 44870 - }, - { - "epoch": 1.43616, - "grad_norm": 47.22616958618164, - "learning_rate": 6.264888888888889e-06, - "loss": 1.6227, - "step": 44880 - }, - { - "epoch": 1.43648, - "grad_norm": 45.4078483581543, - "learning_rate": 6.261333333333334e-06, - "loss": 1.5907, - "step": 44890 - }, - { - "epoch": 1.4368, - "grad_norm": 48.54788589477539, - "learning_rate": 6.2577777777777785e-06, - "loss": 1.5929, - "step": 44900 - }, - { - "epoch": 1.43712, - "grad_norm": 46.870609283447266, - "learning_rate": 6.254222222222223e-06, - "loss": 1.5986, - "step": 44910 - }, - { - "epoch": 1.43744, - "grad_norm": 46.138065338134766, - "learning_rate": 6.250666666666667e-06, - "loss": 1.6462, - "step": 44920 - }, - { - "epoch": 1.43776, - "grad_norm": 49.69923400878906, - "learning_rate": 6.247111111111111e-06, - "loss": 1.63, - "step": 44930 - }, - { - "epoch": 1.43808, - "grad_norm": 44.56926345825195, - "learning_rate": 6.243555555555556e-06, - "loss": 1.6069, - "step": 44940 - }, - { - "epoch": 1.4384000000000001, - "grad_norm": 46.90085220336914, - "learning_rate": 6.24e-06, - "loss": 1.6143, - "step": 44950 - }, - { - "epoch": 1.43872, - "grad_norm": 45.12166213989258, - "learning_rate": 6.236444444444445e-06, - "loss": 1.6029, - "step": 44960 - }, - { - "epoch": 1.4390399999999999, - "grad_norm": 45.84695053100586, - "learning_rate": 6.2328888888888895e-06, - "loss": 1.5996, - "step": 44970 - }, - { - "epoch": 1.43936, - "grad_norm": 46.82448196411133, - "learning_rate": 6.229333333333334e-06, - "loss": 1.609, - "step": 44980 - }, - { - "epoch": 1.43968, - "grad_norm": 44.80758285522461, - "learning_rate": 6.225777777777779e-06, - "loss": 1.5981, - "step": 44990 - }, - { - "epoch": 1.44, - "grad_norm": 
44.90853500366211, - "learning_rate": 6.222222222222223e-06, - "loss": 1.5888, - "step": 45000 - }, - { - "epoch": 1.44032, - "grad_norm": 45.14985275268555, - "learning_rate": 6.218666666666668e-06, - "loss": 1.6153, - "step": 45010 - }, - { - "epoch": 1.44064, - "grad_norm": 46.903263092041016, - "learning_rate": 6.2151111111111114e-06, - "loss": 1.6259, - "step": 45020 - }, - { - "epoch": 1.44096, - "grad_norm": 46.76694869995117, - "learning_rate": 6.211555555555556e-06, - "loss": 1.6259, - "step": 45030 - }, - { - "epoch": 1.44128, - "grad_norm": 45.14419174194336, - "learning_rate": 6.2080000000000005e-06, - "loss": 1.6069, - "step": 45040 - }, - { - "epoch": 1.4416, - "grad_norm": 43.43990707397461, - "learning_rate": 6.204444444444445e-06, - "loss": 1.5894, - "step": 45050 - }, - { - "epoch": 1.44192, - "grad_norm": 46.003562927246094, - "learning_rate": 6.20088888888889e-06, - "loss": 1.6213, - "step": 45060 - }, - { - "epoch": 1.44224, - "grad_norm": 45.48784637451172, - "learning_rate": 6.197333333333334e-06, - "loss": 1.5838, - "step": 45070 - }, - { - "epoch": 1.44256, - "grad_norm": 45.909122467041016, - "learning_rate": 6.193777777777779e-06, - "loss": 1.579, - "step": 45080 - }, - { - "epoch": 1.44288, - "grad_norm": 43.655574798583984, - "learning_rate": 6.190222222222223e-06, - "loss": 1.6093, - "step": 45090 - }, - { - "epoch": 1.4432, - "grad_norm": 45.166168212890625, - "learning_rate": 6.186666666666668e-06, - "loss": 1.6342, - "step": 45100 - }, - { - "epoch": 1.44352, - "grad_norm": 45.9638671875, - "learning_rate": 6.1831111111111116e-06, - "loss": 1.6118, - "step": 45110 - }, - { - "epoch": 1.44384, - "grad_norm": 45.01591873168945, - "learning_rate": 6.179555555555556e-06, - "loss": 1.6021, - "step": 45120 - }, - { - "epoch": 1.44416, - "grad_norm": 46.7148323059082, - "learning_rate": 6.176000000000001e-06, - "loss": 1.5778, - "step": 45130 - }, - { - "epoch": 1.44448, - "grad_norm": 47.17043685913086, - "learning_rate": 6.172444444444445e-06, - "loss": 1.5896, - "step": 45140 - }, - { - "epoch": 1.4447999999999999, - "grad_norm": 42.31821823120117, - "learning_rate": 6.16888888888889e-06, - "loss": 1.5922, - "step": 45150 - }, - { - "epoch": 1.44512, - "grad_norm": 47.12434387207031, - "learning_rate": 6.165333333333334e-06, - "loss": 1.5817, - "step": 45160 - }, - { - "epoch": 1.44544, - "grad_norm": 43.81653594970703, - "learning_rate": 6.161777777777779e-06, - "loss": 1.6213, - "step": 45170 - }, - { - "epoch": 1.44576, - "grad_norm": 47.70396041870117, - "learning_rate": 6.1582222222222234e-06, - "loss": 1.6112, - "step": 45180 - }, - { - "epoch": 1.44608, - "grad_norm": 46.75013732910156, - "learning_rate": 6.154666666666668e-06, - "loss": 1.6432, - "step": 45190 - }, - { - "epoch": 1.4464000000000001, - "grad_norm": 43.111427307128906, - "learning_rate": 6.1511111111111125e-06, - "loss": 1.5941, - "step": 45200 - }, - { - "epoch": 1.44672, - "grad_norm": 43.45669174194336, - "learning_rate": 6.147555555555556e-06, - "loss": 1.6131, - "step": 45210 - }, - { - "epoch": 1.4470399999999999, - "grad_norm": 44.20582962036133, - "learning_rate": 6.144e-06, - "loss": 1.6047, - "step": 45220 - }, - { - "epoch": 1.44736, - "grad_norm": 47.43238830566406, - "learning_rate": 6.1404444444444445e-06, - "loss": 1.5997, - "step": 45230 - }, - { - "epoch": 1.44768, - "grad_norm": 45.39116668701172, - "learning_rate": 6.136888888888889e-06, - "loss": 1.5715, - "step": 45240 - }, - { - "epoch": 1.448, - "grad_norm": 47.27134704589844, - "learning_rate": 
6.133333333333334e-06, - "loss": 1.6172, - "step": 45250 - }, - { - "epoch": 1.44832, - "grad_norm": 45.037811279296875, - "learning_rate": 6.129777777777778e-06, - "loss": 1.6083, - "step": 45260 - }, - { - "epoch": 1.44864, - "grad_norm": 46.992244720458984, - "learning_rate": 6.126222222222222e-06, - "loss": 1.6296, - "step": 45270 - }, - { - "epoch": 1.44896, - "grad_norm": 44.33993911743164, - "learning_rate": 6.1226666666666664e-06, - "loss": 1.6257, - "step": 45280 - }, - { - "epoch": 1.44928, - "grad_norm": 45.206356048583984, - "learning_rate": 6.119111111111111e-06, - "loss": 1.6328, - "step": 45290 - }, - { - "epoch": 1.4496, - "grad_norm": 43.93304443359375, - "learning_rate": 6.1155555555555555e-06, - "loss": 1.6025, - "step": 45300 - }, - { - "epoch": 1.44992, - "grad_norm": 44.74345397949219, - "learning_rate": 6.112e-06, - "loss": 1.5956, - "step": 45310 - }, - { - "epoch": 1.45024, - "grad_norm": 44.77165603637695, - "learning_rate": 6.108444444444445e-06, - "loss": 1.612, - "step": 45320 - }, - { - "epoch": 1.45056, - "grad_norm": 43.939292907714844, - "learning_rate": 6.104888888888889e-06, - "loss": 1.6043, - "step": 45330 - }, - { - "epoch": 1.45088, - "grad_norm": 45.1342658996582, - "learning_rate": 6.101333333333334e-06, - "loss": 1.6139, - "step": 45340 - }, - { - "epoch": 1.4512, - "grad_norm": 43.455413818359375, - "learning_rate": 6.097777777777778e-06, - "loss": 1.6196, - "step": 45350 - }, - { - "epoch": 1.45152, - "grad_norm": 45.01537322998047, - "learning_rate": 6.094222222222223e-06, - "loss": 1.6553, - "step": 45360 - }, - { - "epoch": 1.45184, - "grad_norm": 45.8966178894043, - "learning_rate": 6.0906666666666666e-06, - "loss": 1.6301, - "step": 45370 - }, - { - "epoch": 1.4521600000000001, - "grad_norm": 44.561424255371094, - "learning_rate": 6.087111111111111e-06, - "loss": 1.6058, - "step": 45380 - }, - { - "epoch": 1.45248, - "grad_norm": 43.578922271728516, - "learning_rate": 6.083555555555556e-06, - "loss": 1.5702, - "step": 45390 - }, - { - "epoch": 1.4527999999999999, - "grad_norm": 44.27152633666992, - "learning_rate": 6.08e-06, - "loss": 1.6097, - "step": 45400 - }, - { - "epoch": 1.45312, - "grad_norm": 44.5775032043457, - "learning_rate": 6.076444444444445e-06, - "loss": 1.5524, - "step": 45410 - }, - { - "epoch": 1.45344, - "grad_norm": 47.667057037353516, - "learning_rate": 6.072888888888889e-06, - "loss": 1.6013, - "step": 45420 - }, - { - "epoch": 1.45376, - "grad_norm": 45.95570373535156, - "learning_rate": 6.069333333333334e-06, - "loss": 1.5831, - "step": 45430 - }, - { - "epoch": 1.45408, - "grad_norm": 47.346534729003906, - "learning_rate": 6.0657777777777784e-06, - "loss": 1.588, - "step": 45440 - }, - { - "epoch": 1.4544000000000001, - "grad_norm": 43.89128875732422, - "learning_rate": 6.062222222222223e-06, - "loss": 1.5948, - "step": 45450 - }, - { - "epoch": 1.45472, - "grad_norm": 45.23402786254883, - "learning_rate": 6.0586666666666676e-06, - "loss": 1.6128, - "step": 45460 - }, - { - "epoch": 1.45504, - "grad_norm": 44.87834930419922, - "learning_rate": 6.055111111111111e-06, - "loss": 1.5972, - "step": 45470 - }, - { - "epoch": 1.45536, - "grad_norm": 44.89365768432617, - "learning_rate": 6.051555555555556e-06, - "loss": 1.5834, - "step": 45480 - }, - { - "epoch": 1.45568, - "grad_norm": 47.11971664428711, - "learning_rate": 6.048e-06, - "loss": 1.5941, - "step": 45490 - }, - { - "epoch": 1.456, - "grad_norm": 44.76010513305664, - "learning_rate": 6.044444444444445e-06, - "loss": 1.6244, - "step": 45500 - }, - { - "epoch": 
1.45632, - "grad_norm": 42.880149841308594, - "learning_rate": 6.0408888888888895e-06, - "loss": 1.5966, - "step": 45510 - }, - { - "epoch": 1.45664, - "grad_norm": 46.598670959472656, - "learning_rate": 6.037333333333334e-06, - "loss": 1.5942, - "step": 45520 - }, - { - "epoch": 1.45696, - "grad_norm": 45.492271423339844, - "learning_rate": 6.033777777777779e-06, - "loss": 1.5833, - "step": 45530 - }, - { - "epoch": 1.45728, - "grad_norm": 47.63541793823242, - "learning_rate": 6.030222222222223e-06, - "loss": 1.5963, - "step": 45540 - }, - { - "epoch": 1.4576, - "grad_norm": 45.30315017700195, - "learning_rate": 6.026666666666668e-06, - "loss": 1.5825, - "step": 45550 - }, - { - "epoch": 1.45792, - "grad_norm": 45.10110855102539, - "learning_rate": 6.023111111111111e-06, - "loss": 1.5897, - "step": 45560 - }, - { - "epoch": 1.45824, - "grad_norm": 45.75407028198242, - "learning_rate": 6.019555555555556e-06, - "loss": 1.6144, - "step": 45570 - }, - { - "epoch": 1.45856, - "grad_norm": 46.492149353027344, - "learning_rate": 6.0160000000000005e-06, - "loss": 1.6031, - "step": 45580 - }, - { - "epoch": 1.45888, - "grad_norm": 44.73735809326172, - "learning_rate": 6.012444444444445e-06, - "loss": 1.5914, - "step": 45590 - }, - { - "epoch": 1.4592, - "grad_norm": 45.741764068603516, - "learning_rate": 6.00888888888889e-06, - "loss": 1.6092, - "step": 45600 - }, - { - "epoch": 1.45952, - "grad_norm": 46.8515510559082, - "learning_rate": 6.005333333333334e-06, - "loss": 1.6302, - "step": 45610 - }, - { - "epoch": 1.45984, - "grad_norm": 44.54631805419922, - "learning_rate": 6.001777777777779e-06, - "loss": 1.6044, - "step": 45620 - }, - { - "epoch": 1.4601600000000001, - "grad_norm": 45.480140686035156, - "learning_rate": 5.998222222222223e-06, - "loss": 1.6075, - "step": 45630 - }, - { - "epoch": 1.46048, - "grad_norm": 45.182369232177734, - "learning_rate": 5.994666666666668e-06, - "loss": 1.5839, - "step": 45640 - }, - { - "epoch": 1.4607999999999999, - "grad_norm": 46.069602966308594, - "learning_rate": 5.991111111111112e-06, - "loss": 1.6244, - "step": 45650 - }, - { - "epoch": 1.46112, - "grad_norm": 45.65216827392578, - "learning_rate": 5.987555555555556e-06, - "loss": 1.6179, - "step": 45660 - }, - { - "epoch": 1.46144, - "grad_norm": 44.958251953125, - "learning_rate": 5.984000000000001e-06, - "loss": 1.6392, - "step": 45670 - }, - { - "epoch": 1.46176, - "grad_norm": 46.477901458740234, - "learning_rate": 5.980444444444445e-06, - "loss": 1.6115, - "step": 45680 - }, - { - "epoch": 1.46208, - "grad_norm": 47.5823974609375, - "learning_rate": 5.97688888888889e-06, - "loss": 1.6286, - "step": 45690 - }, - { - "epoch": 1.4624, - "grad_norm": 46.62644958496094, - "learning_rate": 5.973333333333334e-06, - "loss": 1.5928, - "step": 45700 - }, - { - "epoch": 1.46272, - "grad_norm": 45.06728744506836, - "learning_rate": 5.969777777777779e-06, - "loss": 1.6042, - "step": 45710 - }, - { - "epoch": 1.46304, - "grad_norm": 43.426483154296875, - "learning_rate": 5.966222222222223e-06, - "loss": 1.5828, - "step": 45720 - }, - { - "epoch": 1.46336, - "grad_norm": 47.46270751953125, - "learning_rate": 5.962666666666666e-06, - "loss": 1.5998, - "step": 45730 - }, - { - "epoch": 1.46368, - "grad_norm": 45.45689392089844, - "learning_rate": 5.959111111111111e-06, - "loss": 1.5961, - "step": 45740 - }, - { - "epoch": 1.464, - "grad_norm": 45.77447509765625, - "learning_rate": 5.955555555555555e-06, - "loss": 1.612, - "step": 45750 - }, - { - "epoch": 1.46432, - "grad_norm": 44.932464599609375, - 
"learning_rate": 5.952e-06, - "loss": 1.6301, - "step": 45760 - }, - { - "epoch": 1.46464, - "grad_norm": 43.912498474121094, - "learning_rate": 5.9484444444444445e-06, - "loss": 1.6006, - "step": 45770 - }, - { - "epoch": 1.46496, - "grad_norm": 44.95744705200195, - "learning_rate": 5.944888888888889e-06, - "loss": 1.5926, - "step": 45780 - }, - { - "epoch": 1.46528, - "grad_norm": 45.56084060668945, - "learning_rate": 5.941333333333334e-06, - "loss": 1.5965, - "step": 45790 - }, - { - "epoch": 1.4656, - "grad_norm": 45.82440185546875, - "learning_rate": 5.937777777777778e-06, - "loss": 1.6084, - "step": 45800 - }, - { - "epoch": 1.4659200000000001, - "grad_norm": 45.97123718261719, - "learning_rate": 5.934222222222223e-06, - "loss": 1.6145, - "step": 45810 - }, - { - "epoch": 1.46624, - "grad_norm": 42.941612243652344, - "learning_rate": 5.930666666666666e-06, - "loss": 1.5818, - "step": 45820 - }, - { - "epoch": 1.4665599999999999, - "grad_norm": 47.44602966308594, - "learning_rate": 5.927111111111111e-06, - "loss": 1.5927, - "step": 45830 - }, - { - "epoch": 1.46688, - "grad_norm": 44.84784698486328, - "learning_rate": 5.9235555555555555e-06, - "loss": 1.5655, - "step": 45840 - }, - { - "epoch": 1.4672, - "grad_norm": 44.033390045166016, - "learning_rate": 5.92e-06, - "loss": 1.5958, - "step": 45850 - }, - { - "epoch": 1.46752, - "grad_norm": 42.78225326538086, - "learning_rate": 5.916444444444445e-06, - "loss": 1.6028, - "step": 45860 - }, - { - "epoch": 1.46784, - "grad_norm": 42.93524932861328, - "learning_rate": 5.912888888888889e-06, - "loss": 1.5768, - "step": 45870 - }, - { - "epoch": 1.4681600000000001, - "grad_norm": 47.09729766845703, - "learning_rate": 5.909333333333334e-06, - "loss": 1.6107, - "step": 45880 - }, - { - "epoch": 1.46848, - "grad_norm": 45.86977005004883, - "learning_rate": 5.905777777777778e-06, - "loss": 1.6132, - "step": 45890 - }, - { - "epoch": 1.4687999999999999, - "grad_norm": 46.18256759643555, - "learning_rate": 5.902222222222223e-06, - "loss": 1.6015, - "step": 45900 - }, - { - "epoch": 1.46912, - "grad_norm": 44.45927810668945, - "learning_rate": 5.898666666666667e-06, - "loss": 1.604, - "step": 45910 - }, - { - "epoch": 1.46944, - "grad_norm": 46.4095344543457, - "learning_rate": 5.895111111111111e-06, - "loss": 1.6144, - "step": 45920 - }, - { - "epoch": 1.46976, - "grad_norm": 47.12724304199219, - "learning_rate": 5.891555555555556e-06, - "loss": 1.606, - "step": 45930 - }, - { - "epoch": 1.47008, - "grad_norm": 45.679046630859375, - "learning_rate": 5.888e-06, - "loss": 1.5945, - "step": 45940 - }, - { - "epoch": 1.4704, - "grad_norm": 46.56907272338867, - "learning_rate": 5.884444444444445e-06, - "loss": 1.5931, - "step": 45950 - }, - { - "epoch": 1.47072, - "grad_norm": 44.32508850097656, - "learning_rate": 5.880888888888889e-06, - "loss": 1.5924, - "step": 45960 - }, - { - "epoch": 1.47104, - "grad_norm": 45.68732452392578, - "learning_rate": 5.877333333333334e-06, - "loss": 1.6142, - "step": 45970 - }, - { - "epoch": 1.47136, - "grad_norm": 46.45209503173828, - "learning_rate": 5.873777777777778e-06, - "loss": 1.6032, - "step": 45980 - }, - { - "epoch": 1.47168, - "grad_norm": 45.467323303222656, - "learning_rate": 5.870222222222223e-06, - "loss": 1.5986, - "step": 45990 - }, - { - "epoch": 1.472, - "grad_norm": 46.137996673583984, - "learning_rate": 5.8666666666666675e-06, - "loss": 1.6074, - "step": 46000 - }, - { - "epoch": 1.47232, - "grad_norm": 44.18669128417969, - "learning_rate": 5.863111111111111e-06, - "loss": 1.5913, - "step": 
46010 - }, - { - "epoch": 1.47264, - "grad_norm": 44.52817153930664, - "learning_rate": 5.859555555555556e-06, - "loss": 1.6008, - "step": 46020 - }, - { - "epoch": 1.47296, - "grad_norm": 44.14411163330078, - "learning_rate": 5.856e-06, - "loss": 1.593, - "step": 46030 - }, - { - "epoch": 1.47328, - "grad_norm": 46.95811462402344, - "learning_rate": 5.852444444444445e-06, - "loss": 1.6368, - "step": 46040 - }, - { - "epoch": 1.4736, - "grad_norm": 46.106109619140625, - "learning_rate": 5.8488888888888895e-06, - "loss": 1.6116, - "step": 46050 - }, - { - "epoch": 1.4739200000000001, - "grad_norm": 45.18093490600586, - "learning_rate": 5.845333333333334e-06, - "loss": 1.5901, - "step": 46060 - }, - { - "epoch": 1.47424, - "grad_norm": 44.52831268310547, - "learning_rate": 5.8417777777777786e-06, - "loss": 1.6002, - "step": 46070 - }, - { - "epoch": 1.4745599999999999, - "grad_norm": 46.35838317871094, - "learning_rate": 5.838222222222223e-06, - "loss": 1.604, - "step": 46080 - }, - { - "epoch": 1.47488, - "grad_norm": 47.06918716430664, - "learning_rate": 5.834666666666668e-06, - "loss": 1.6245, - "step": 46090 - }, - { - "epoch": 1.4752, - "grad_norm": 47.2197151184082, - "learning_rate": 5.831111111111112e-06, - "loss": 1.5986, - "step": 46100 - }, - { - "epoch": 1.47552, - "grad_norm": 45.36792755126953, - "learning_rate": 5.827555555555556e-06, - "loss": 1.61, - "step": 46110 - }, - { - "epoch": 1.47584, - "grad_norm": 48.5582275390625, - "learning_rate": 5.8240000000000005e-06, - "loss": 1.6092, - "step": 46120 - }, - { - "epoch": 1.4761600000000001, - "grad_norm": 46.83791732788086, - "learning_rate": 5.820444444444445e-06, - "loss": 1.6158, - "step": 46130 - }, - { - "epoch": 1.47648, - "grad_norm": 45.138389587402344, - "learning_rate": 5.81688888888889e-06, - "loss": 1.6268, - "step": 46140 - }, - { - "epoch": 1.4768, - "grad_norm": 45.886837005615234, - "learning_rate": 5.813333333333334e-06, - "loss": 1.6131, - "step": 46150 - }, - { - "epoch": 1.47712, - "grad_norm": 45.56637191772461, - "learning_rate": 5.809777777777779e-06, - "loss": 1.6202, - "step": 46160 - }, - { - "epoch": 1.47744, - "grad_norm": 44.13711929321289, - "learning_rate": 5.806222222222223e-06, - "loss": 1.5829, - "step": 46170 - }, - { - "epoch": 1.47776, - "grad_norm": 44.50336456298828, - "learning_rate": 5.802666666666668e-06, - "loss": 1.5959, - "step": 46180 - }, - { - "epoch": 1.47808, - "grad_norm": 45.071685791015625, - "learning_rate": 5.799111111111112e-06, - "loss": 1.6065, - "step": 46190 - }, - { - "epoch": 1.4784, - "grad_norm": 42.92729568481445, - "learning_rate": 5.795555555555557e-06, - "loss": 1.6215, - "step": 46200 - }, - { - "epoch": 1.47872, - "grad_norm": 43.77521896362305, - "learning_rate": 5.792000000000001e-06, - "loss": 1.5754, - "step": 46210 - }, - { - "epoch": 1.47904, - "grad_norm": 46.85535430908203, - "learning_rate": 5.788444444444445e-06, - "loss": 1.6214, - "step": 46220 - }, - { - "epoch": 1.47936, - "grad_norm": 44.22446823120117, - "learning_rate": 5.78488888888889e-06, - "loss": 1.5873, - "step": 46230 - }, - { - "epoch": 1.47968, - "grad_norm": 44.680049896240234, - "learning_rate": 5.781333333333334e-06, - "loss": 1.6033, - "step": 46240 - }, - { - "epoch": 1.48, - "grad_norm": 44.47894287109375, - "learning_rate": 5.777777777777778e-06, - "loss": 1.5551, - "step": 46250 - }, - { - "epoch": 1.48032, - "grad_norm": 45.85274887084961, - "learning_rate": 5.7742222222222225e-06, - "loss": 1.5752, - "step": 46260 - }, - { - "epoch": 1.48064, - "grad_norm": 
43.49520492553711, - "learning_rate": 5.770666666666666e-06, - "loss": 1.603, - "step": 46270 - }, - { - "epoch": 1.48096, - "grad_norm": 46.607933044433594, - "learning_rate": 5.767111111111111e-06, - "loss": 1.6183, - "step": 46280 - }, - { - "epoch": 1.48128, - "grad_norm": 43.7589225769043, - "learning_rate": 5.763555555555555e-06, - "loss": 1.5868, - "step": 46290 - }, - { - "epoch": 1.4816, - "grad_norm": 45.31899642944336, - "learning_rate": 5.76e-06, - "loss": 1.5847, - "step": 46300 - }, - { - "epoch": 1.4819200000000001, - "grad_norm": 44.8742790222168, - "learning_rate": 5.7564444444444445e-06, - "loss": 1.6009, - "step": 46310 - }, - { - "epoch": 1.48224, - "grad_norm": 44.30742645263672, - "learning_rate": 5.752888888888889e-06, - "loss": 1.618, - "step": 46320 - }, - { - "epoch": 1.4825599999999999, - "grad_norm": 48.276710510253906, - "learning_rate": 5.7493333333333336e-06, - "loss": 1.6021, - "step": 46330 - }, - { - "epoch": 1.48288, - "grad_norm": 45.02067947387695, - "learning_rate": 5.745777777777778e-06, - "loss": 1.6088, - "step": 46340 - }, - { - "epoch": 1.4832, - "grad_norm": 43.991207122802734, - "learning_rate": 5.742222222222223e-06, - "loss": 1.6089, - "step": 46350 - }, - { - "epoch": 1.48352, - "grad_norm": 44.636775970458984, - "learning_rate": 5.738666666666667e-06, - "loss": 1.5817, - "step": 46360 - }, - { - "epoch": 1.48384, - "grad_norm": 45.58132553100586, - "learning_rate": 5.735111111111111e-06, - "loss": 1.6172, - "step": 46370 - }, - { - "epoch": 1.48416, - "grad_norm": 46.23253631591797, - "learning_rate": 5.7315555555555555e-06, - "loss": 1.597, - "step": 46380 - }, - { - "epoch": 1.48448, - "grad_norm": 45.27110290527344, - "learning_rate": 5.728e-06, - "loss": 1.5837, - "step": 46390 - }, - { - "epoch": 1.4848, - "grad_norm": 44.85041046142578, - "learning_rate": 5.724444444444445e-06, - "loss": 1.6112, - "step": 46400 - }, - { - "epoch": 1.48512, - "grad_norm": 44.14605712890625, - "learning_rate": 5.720888888888889e-06, - "loss": 1.6024, - "step": 46410 - }, - { - "epoch": 1.48544, - "grad_norm": 44.91823196411133, - "learning_rate": 5.717333333333334e-06, - "loss": 1.6021, - "step": 46420 - }, - { - "epoch": 1.48576, - "grad_norm": 47.42062759399414, - "learning_rate": 5.713777777777778e-06, - "loss": 1.5879, - "step": 46430 - }, - { - "epoch": 1.48608, - "grad_norm": 43.79571533203125, - "learning_rate": 5.710222222222223e-06, - "loss": 1.5729, - "step": 46440 - }, - { - "epoch": 1.4864, - "grad_norm": 43.80337905883789, - "learning_rate": 5.706666666666667e-06, - "loss": 1.6017, - "step": 46450 - }, - { - "epoch": 1.48672, - "grad_norm": 44.557674407958984, - "learning_rate": 5.703111111111111e-06, - "loss": 1.6278, - "step": 46460 - }, - { - "epoch": 1.48704, - "grad_norm": 43.0338134765625, - "learning_rate": 5.699555555555556e-06, - "loss": 1.593, - "step": 46470 - }, - { - "epoch": 1.48736, - "grad_norm": 44.92878341674805, - "learning_rate": 5.696e-06, - "loss": 1.6161, - "step": 46480 - }, - { - "epoch": 1.4876800000000001, - "grad_norm": 46.56928253173828, - "learning_rate": 5.692444444444445e-06, - "loss": 1.5964, - "step": 46490 - }, - { - "epoch": 1.488, - "grad_norm": 44.472347259521484, - "learning_rate": 5.688888888888889e-06, - "loss": 1.6109, - "step": 46500 - }, - { - "epoch": 1.4883199999999999, - "grad_norm": 47.81435012817383, - "learning_rate": 5.685333333333334e-06, - "loss": 1.6079, - "step": 46510 - }, - { - "epoch": 1.48864, - "grad_norm": 45.43522644042969, - "learning_rate": 5.681777777777778e-06, - "loss": 
1.5983, - "step": 46520 - }, - { - "epoch": 1.48896, - "grad_norm": 43.59268569946289, - "learning_rate": 5.678222222222223e-06, - "loss": 1.6173, - "step": 46530 - }, - { - "epoch": 1.48928, - "grad_norm": 46.434661865234375, - "learning_rate": 5.6746666666666675e-06, - "loss": 1.637, - "step": 46540 - }, - { - "epoch": 1.4896, - "grad_norm": 45.176727294921875, - "learning_rate": 5.671111111111112e-06, - "loss": 1.5801, - "step": 46550 - }, - { - "epoch": 1.4899200000000001, - "grad_norm": 44.13080596923828, - "learning_rate": 5.667555555555556e-06, - "loss": 1.606, - "step": 46560 - }, - { - "epoch": 1.49024, - "grad_norm": 45.61445617675781, - "learning_rate": 5.664e-06, - "loss": 1.6441, - "step": 46570 - }, - { - "epoch": 1.4905599999999999, - "grad_norm": 45.91599655151367, - "learning_rate": 5.660444444444445e-06, - "loss": 1.5853, - "step": 46580 - }, - { - "epoch": 1.49088, - "grad_norm": 48.271461486816406, - "learning_rate": 5.6568888888888894e-06, - "loss": 1.6122, - "step": 46590 - }, - { - "epoch": 1.4912, - "grad_norm": 44.87429428100586, - "learning_rate": 5.653333333333334e-06, - "loss": 1.6104, - "step": 46600 - }, - { - "epoch": 1.49152, - "grad_norm": 44.66387939453125, - "learning_rate": 5.6497777777777785e-06, - "loss": 1.6249, - "step": 46610 - }, - { - "epoch": 1.49184, - "grad_norm": 44.479129791259766, - "learning_rate": 5.646222222222223e-06, - "loss": 1.6212, - "step": 46620 - }, - { - "epoch": 1.49216, - "grad_norm": 45.07963180541992, - "learning_rate": 5.642666666666668e-06, - "loss": 1.6018, - "step": 46630 - }, - { - "epoch": 1.49248, - "grad_norm": 44.61531448364258, - "learning_rate": 5.639111111111112e-06, - "loss": 1.6329, - "step": 46640 - }, - { - "epoch": 1.4928, - "grad_norm": 44.30540084838867, - "learning_rate": 5.635555555555557e-06, - "loss": 1.5831, - "step": 46650 - }, - { - "epoch": 1.49312, - "grad_norm": 46.067989349365234, - "learning_rate": 5.6320000000000005e-06, - "loss": 1.5975, - "step": 46660 - }, - { - "epoch": 1.49344, - "grad_norm": 42.87896728515625, - "learning_rate": 5.628444444444445e-06, - "loss": 1.5897, - "step": 46670 - }, - { - "epoch": 1.49376, - "grad_norm": 44.7100830078125, - "learning_rate": 5.6248888888888896e-06, - "loss": 1.6028, - "step": 46680 - }, - { - "epoch": 1.49408, - "grad_norm": 45.40373229980469, - "learning_rate": 5.621333333333334e-06, - "loss": 1.603, - "step": 46690 - }, - { - "epoch": 1.4944, - "grad_norm": 45.81529235839844, - "learning_rate": 5.617777777777779e-06, - "loss": 1.5886, - "step": 46700 - }, - { - "epoch": 1.49472, - "grad_norm": 45.32423782348633, - "learning_rate": 5.614222222222223e-06, - "loss": 1.6118, - "step": 46710 - }, - { - "epoch": 1.49504, - "grad_norm": 53.411312103271484, - "learning_rate": 5.610666666666668e-06, - "loss": 1.6016, - "step": 46720 - }, - { - "epoch": 1.49536, - "grad_norm": 44.250362396240234, - "learning_rate": 5.607111111111112e-06, - "loss": 1.5979, - "step": 46730 - }, - { - "epoch": 1.4956800000000001, - "grad_norm": 43.215576171875, - "learning_rate": 5.603555555555557e-06, - "loss": 1.5766, - "step": 46740 - }, - { - "epoch": 1.496, - "grad_norm": 45.43357849121094, - "learning_rate": 5.600000000000001e-06, - "loss": 1.6177, - "step": 46750 - }, - { - "epoch": 1.4963199999999999, - "grad_norm": 43.38033676147461, - "learning_rate": 5.596444444444445e-06, - "loss": 1.6082, - "step": 46760 - }, - { - "epoch": 1.49664, - "grad_norm": 45.17076873779297, - "learning_rate": 5.592888888888889e-06, - "loss": 1.5966, - "step": 46770 - }, - { - "epoch": 
1.49696, - "grad_norm": 44.70008850097656, - "learning_rate": 5.589333333333333e-06, - "loss": 1.5992, - "step": 46780 - }, - { - "epoch": 1.49728, - "grad_norm": 44.854427337646484, - "learning_rate": 5.585777777777778e-06, - "loss": 1.6058, - "step": 46790 - }, - { - "epoch": 1.4976, - "grad_norm": 45.27376937866211, - "learning_rate": 5.5822222222222225e-06, - "loss": 1.5978, - "step": 46800 - }, - { - "epoch": 1.49792, - "grad_norm": 45.82780838012695, - "learning_rate": 5.578666666666667e-06, - "loss": 1.6005, - "step": 46810 - }, - { - "epoch": 1.49824, - "grad_norm": 45.11985397338867, - "learning_rate": 5.575111111111111e-06, - "loss": 1.6075, - "step": 46820 - }, - { - "epoch": 1.49856, - "grad_norm": 45.486507415771484, - "learning_rate": 5.571555555555555e-06, - "loss": 1.58, - "step": 46830 - }, - { - "epoch": 1.49888, - "grad_norm": 46.610107421875, - "learning_rate": 5.568e-06, - "loss": 1.5867, - "step": 46840 - }, - { - "epoch": 1.4992, - "grad_norm": 45.96479415893555, - "learning_rate": 5.5644444444444444e-06, - "loss": 1.6277, - "step": 46850 - }, - { - "epoch": 1.49952, - "grad_norm": 44.71353530883789, - "learning_rate": 5.560888888888889e-06, - "loss": 1.5942, - "step": 46860 - }, - { - "epoch": 1.49984, - "grad_norm": 44.52565002441406, - "learning_rate": 5.5573333333333335e-06, - "loss": 1.5795, - "step": 46870 - }, - { - "epoch": 1.5001600000000002, - "grad_norm": 44.66649627685547, - "learning_rate": 5.553777777777778e-06, - "loss": 1.5923, - "step": 46880 - }, - { - "epoch": 1.50048, - "grad_norm": 44.97325134277344, - "learning_rate": 5.550222222222223e-06, - "loss": 1.5878, - "step": 46890 - }, - { - "epoch": 1.5008, - "grad_norm": 45.9375114440918, - "learning_rate": 5.546666666666667e-06, - "loss": 1.5763, - "step": 46900 - }, - { - "epoch": 1.50112, - "grad_norm": 45.02996063232422, - "learning_rate": 5.543111111111111e-06, - "loss": 1.6027, - "step": 46910 - }, - { - "epoch": 1.50144, - "grad_norm": 46.09992980957031, - "learning_rate": 5.5395555555555555e-06, - "loss": 1.5972, - "step": 46920 - }, - { - "epoch": 1.50176, - "grad_norm": 43.5955696105957, - "learning_rate": 5.536e-06, - "loss": 1.6164, - "step": 46930 - }, - { - "epoch": 1.5020799999999999, - "grad_norm": 43.62666320800781, - "learning_rate": 5.5324444444444446e-06, - "loss": 1.5872, - "step": 46940 - }, - { - "epoch": 1.5024, - "grad_norm": 45.09193801879883, - "learning_rate": 5.528888888888889e-06, - "loss": 1.5942, - "step": 46950 - }, - { - "epoch": 1.50272, - "grad_norm": 43.42613220214844, - "learning_rate": 5.525333333333334e-06, - "loss": 1.5889, - "step": 46960 - }, - { - "epoch": 1.50304, - "grad_norm": 45.32449722290039, - "learning_rate": 5.521777777777778e-06, - "loss": 1.5731, - "step": 46970 - }, - { - "epoch": 1.50336, - "grad_norm": 47.36893081665039, - "learning_rate": 5.518222222222223e-06, - "loss": 1.6103, - "step": 46980 - }, - { - "epoch": 1.5036800000000001, - "grad_norm": 45.34735870361328, - "learning_rate": 5.514666666666667e-06, - "loss": 1.6225, - "step": 46990 - }, - { - "epoch": 1.504, - "grad_norm": 46.7711296081543, - "learning_rate": 5.511111111111112e-06, - "loss": 1.6042, - "step": 47000 - }, - { - "epoch": 1.5043199999999999, - "grad_norm": 44.58100891113281, - "learning_rate": 5.507555555555556e-06, - "loss": 1.6325, - "step": 47010 - }, - { - "epoch": 1.50464, - "grad_norm": 45.050392150878906, - "learning_rate": 5.504e-06, - "loss": 1.5773, - "step": 47020 - }, - { - "epoch": 1.50496, - "grad_norm": 46.156028747558594, - "learning_rate": 
5.500444444444445e-06, - "loss": 1.5925, - "step": 47030 - }, - { - "epoch": 1.50528, - "grad_norm": 45.153507232666016, - "learning_rate": 5.496888888888889e-06, - "loss": 1.5523, - "step": 47040 - }, - { - "epoch": 1.5056, - "grad_norm": 44.15436935424805, - "learning_rate": 5.493333333333334e-06, - "loss": 1.6102, - "step": 47050 - }, - { - "epoch": 1.5059200000000001, - "grad_norm": 44.35607147216797, - "learning_rate": 5.489777777777778e-06, - "loss": 1.6043, - "step": 47060 - }, - { - "epoch": 1.50624, - "grad_norm": 47.05322265625, - "learning_rate": 5.486222222222223e-06, - "loss": 1.6261, - "step": 47070 - }, - { - "epoch": 1.50656, - "grad_norm": 47.88312911987305, - "learning_rate": 5.4826666666666675e-06, - "loss": 1.6146, - "step": 47080 - }, - { - "epoch": 1.50688, - "grad_norm": 44.9384651184082, - "learning_rate": 5.479111111111112e-06, - "loss": 1.6216, - "step": 47090 - }, - { - "epoch": 1.5072, - "grad_norm": 45.72306823730469, - "learning_rate": 5.475555555555557e-06, - "loss": 1.6122, - "step": 47100 - }, - { - "epoch": 1.50752, - "grad_norm": 44.87019729614258, - "learning_rate": 5.472e-06, - "loss": 1.6009, - "step": 47110 - }, - { - "epoch": 1.5078399999999998, - "grad_norm": 43.695926666259766, - "learning_rate": 5.468444444444445e-06, - "loss": 1.6109, - "step": 47120 - }, - { - "epoch": 1.50816, - "grad_norm": 44.68513107299805, - "learning_rate": 5.464888888888889e-06, - "loss": 1.5772, - "step": 47130 - }, - { - "epoch": 1.50848, - "grad_norm": 44.061100006103516, - "learning_rate": 5.461333333333334e-06, - "loss": 1.6069, - "step": 47140 - }, - { - "epoch": 1.5088, - "grad_norm": 45.412845611572266, - "learning_rate": 5.4577777777777785e-06, - "loss": 1.5927, - "step": 47150 - }, - { - "epoch": 1.50912, - "grad_norm": 46.47201919555664, - "learning_rate": 5.454222222222223e-06, - "loss": 1.6105, - "step": 47160 - }, - { - "epoch": 1.5094400000000001, - "grad_norm": 42.82103729248047, - "learning_rate": 5.450666666666668e-06, - "loss": 1.5919, - "step": 47170 - }, - { - "epoch": 1.50976, - "grad_norm": 46.60007858276367, - "learning_rate": 5.447111111111112e-06, - "loss": 1.5819, - "step": 47180 - }, - { - "epoch": 1.5100799999999999, - "grad_norm": 45.34518051147461, - "learning_rate": 5.443555555555557e-06, - "loss": 1.5926, - "step": 47190 - }, - { - "epoch": 1.5104, - "grad_norm": 44.76665496826172, - "learning_rate": 5.4400000000000004e-06, - "loss": 1.6135, - "step": 47200 - }, - { - "epoch": 1.51072, - "grad_norm": 44.275394439697266, - "learning_rate": 5.436444444444445e-06, - "loss": 1.6198, - "step": 47210 - }, - { - "epoch": 1.51104, - "grad_norm": 45.03862380981445, - "learning_rate": 5.4328888888888895e-06, - "loss": 1.5881, - "step": 47220 - }, - { - "epoch": 1.51136, - "grad_norm": 46.152610778808594, - "learning_rate": 5.429333333333334e-06, - "loss": 1.6101, - "step": 47230 - }, - { - "epoch": 1.5116800000000001, - "grad_norm": 46.49415969848633, - "learning_rate": 5.425777777777779e-06, - "loss": 1.5826, - "step": 47240 - }, - { - "epoch": 1.512, - "grad_norm": 44.52889633178711, - "learning_rate": 5.422222222222223e-06, - "loss": 1.5879, - "step": 47250 - }, - { - "epoch": 1.5123199999999999, - "grad_norm": 50.50562286376953, - "learning_rate": 5.418666666666668e-06, - "loss": 1.5655, - "step": 47260 - }, - { - "epoch": 1.51264, - "grad_norm": 46.33927536010742, - "learning_rate": 5.415111111111112e-06, - "loss": 1.5988, - "step": 47270 - }, - { - "epoch": 1.51296, - "grad_norm": 43.79774856567383, - "learning_rate": 5.411555555555555e-06, - 
"loss": 1.614, - "step": 47280 - }, - { - "epoch": 1.51328, - "grad_norm": 46.49938201904297, - "learning_rate": 5.408e-06, - "loss": 1.5785, - "step": 47290 - }, - { - "epoch": 1.5135999999999998, - "grad_norm": 47.394378662109375, - "learning_rate": 5.404444444444444e-06, - "loss": 1.5937, - "step": 47300 - }, - { - "epoch": 1.5139200000000002, - "grad_norm": 44.6570930480957, - "learning_rate": 5.400888888888889e-06, - "loss": 1.6198, - "step": 47310 - }, - { - "epoch": 1.51424, - "grad_norm": 45.7407341003418, - "learning_rate": 5.397333333333333e-06, - "loss": 1.6085, - "step": 47320 - }, - { - "epoch": 1.51456, - "grad_norm": 44.883331298828125, - "learning_rate": 5.393777777777778e-06, - "loss": 1.5917, - "step": 47330 - }, - { - "epoch": 1.51488, - "grad_norm": 45.57594680786133, - "learning_rate": 5.3902222222222225e-06, - "loss": 1.5957, - "step": 47340 - }, - { - "epoch": 1.5152, - "grad_norm": 45.7113151550293, - "learning_rate": 5.386666666666667e-06, - "loss": 1.6068, - "step": 47350 - }, - { - "epoch": 1.51552, - "grad_norm": 47.294921875, - "learning_rate": 5.383111111111112e-06, - "loss": 1.5901, - "step": 47360 - }, - { - "epoch": 1.5158399999999999, - "grad_norm": 46.925968170166016, - "learning_rate": 5.379555555555555e-06, - "loss": 1.6108, - "step": 47370 - }, - { - "epoch": 1.51616, - "grad_norm": 46.3890495300293, - "learning_rate": 5.376e-06, - "loss": 1.5755, - "step": 47380 - }, - { - "epoch": 1.51648, - "grad_norm": 46.12717056274414, - "learning_rate": 5.372444444444444e-06, - "loss": 1.6021, - "step": 47390 - }, - { - "epoch": 1.5168, - "grad_norm": 47.06908416748047, - "learning_rate": 5.368888888888889e-06, - "loss": 1.5884, - "step": 47400 - }, - { - "epoch": 1.51712, - "grad_norm": 46.1263427734375, - "learning_rate": 5.3653333333333335e-06, - "loss": 1.5993, - "step": 47410 - }, - { - "epoch": 1.5174400000000001, - "grad_norm": 44.17634582519531, - "learning_rate": 5.361777777777778e-06, - "loss": 1.5801, - "step": 47420 - }, - { - "epoch": 1.51776, - "grad_norm": 44.7050895690918, - "learning_rate": 5.358222222222223e-06, - "loss": 1.6294, - "step": 47430 - }, - { - "epoch": 1.5180799999999999, - "grad_norm": 45.57835388183594, - "learning_rate": 5.354666666666667e-06, - "loss": 1.6072, - "step": 47440 - }, - { - "epoch": 1.5184, - "grad_norm": 43.80586242675781, - "learning_rate": 5.351111111111112e-06, - "loss": 1.5884, - "step": 47450 - }, - { - "epoch": 1.51872, - "grad_norm": 47.311214447021484, - "learning_rate": 5.3475555555555554e-06, - "loss": 1.5966, - "step": 47460 - }, - { - "epoch": 1.51904, - "grad_norm": 44.86566162109375, - "learning_rate": 5.344e-06, - "loss": 1.5913, - "step": 47470 - }, - { - "epoch": 1.51936, - "grad_norm": 46.597415924072266, - "learning_rate": 5.3404444444444445e-06, - "loss": 1.6077, - "step": 47480 - }, - { - "epoch": 1.5196800000000001, - "grad_norm": 46.57792663574219, - "learning_rate": 5.336888888888889e-06, - "loss": 1.6201, - "step": 47490 - }, - { - "epoch": 1.52, - "grad_norm": 42.707462310791016, - "learning_rate": 5.333333333333334e-06, - "loss": 1.6004, - "step": 47500 - }, - { - "epoch": 1.52032, - "grad_norm": 46.76858139038086, - "learning_rate": 5.329777777777778e-06, - "loss": 1.5991, - "step": 47510 - }, - { - "epoch": 1.52064, - "grad_norm": 46.61631393432617, - "learning_rate": 5.326222222222223e-06, - "loss": 1.6124, - "step": 47520 - }, - { - "epoch": 1.52096, - "grad_norm": 43.41951370239258, - "learning_rate": 5.322666666666667e-06, - "loss": 1.5968, - "step": 47530 - }, - { - "epoch": 
1.52128, - "grad_norm": 47.41061782836914, - "learning_rate": 5.319111111111112e-06, - "loss": 1.6163, - "step": 47540 - }, - { - "epoch": 1.5215999999999998, - "grad_norm": 44.8211669921875, - "learning_rate": 5.3155555555555564e-06, - "loss": 1.6268, - "step": 47550 - }, - { - "epoch": 1.5219200000000002, - "grad_norm": 47.11516571044922, - "learning_rate": 5.312e-06, - "loss": 1.6076, - "step": 47560 - }, - { - "epoch": 1.52224, - "grad_norm": 46.79738998413086, - "learning_rate": 5.308444444444445e-06, - "loss": 1.6105, - "step": 47570 - }, - { - "epoch": 1.52256, - "grad_norm": 46.0588493347168, - "learning_rate": 5.304888888888889e-06, - "loss": 1.596, - "step": 47580 - }, - { - "epoch": 1.52288, - "grad_norm": 45.7782096862793, - "learning_rate": 5.301333333333334e-06, - "loss": 1.5706, - "step": 47590 - }, - { - "epoch": 1.5232, - "grad_norm": 45.6116828918457, - "learning_rate": 5.297777777777778e-06, - "loss": 1.5931, - "step": 47600 - }, - { - "epoch": 1.52352, - "grad_norm": 46.35176467895508, - "learning_rate": 5.294222222222223e-06, - "loss": 1.6091, - "step": 47610 - }, - { - "epoch": 1.5238399999999999, - "grad_norm": 46.78037643432617, - "learning_rate": 5.2906666666666675e-06, - "loss": 1.5805, - "step": 47620 - }, - { - "epoch": 1.52416, - "grad_norm": 45.619407653808594, - "learning_rate": 5.287111111111112e-06, - "loss": 1.6012, - "step": 47630 - }, - { - "epoch": 1.52448, - "grad_norm": 45.890953063964844, - "learning_rate": 5.2835555555555566e-06, - "loss": 1.607, - "step": 47640 - }, - { - "epoch": 1.5248, - "grad_norm": 47.279937744140625, - "learning_rate": 5.28e-06, - "loss": 1.5893, - "step": 47650 - }, - { - "epoch": 1.52512, - "grad_norm": 44.826416015625, - "learning_rate": 5.276444444444445e-06, - "loss": 1.6026, - "step": 47660 - }, - { - "epoch": 1.5254400000000001, - "grad_norm": 47.3812255859375, - "learning_rate": 5.272888888888889e-06, - "loss": 1.6193, - "step": 47670 - }, - { - "epoch": 1.52576, - "grad_norm": 44.42546081542969, - "learning_rate": 5.269333333333334e-06, - "loss": 1.5867, - "step": 47680 - }, - { - "epoch": 1.5260799999999999, - "grad_norm": 45.50373840332031, - "learning_rate": 5.2657777777777785e-06, - "loss": 1.5956, - "step": 47690 - }, - { - "epoch": 1.5264, - "grad_norm": 47.383426666259766, - "learning_rate": 5.262222222222223e-06, - "loss": 1.5812, - "step": 47700 - }, - { - "epoch": 1.52672, - "grad_norm": 43.08425521850586, - "learning_rate": 5.258666666666668e-06, - "loss": 1.5956, - "step": 47710 - }, - { - "epoch": 1.52704, - "grad_norm": 45.51042556762695, - "learning_rate": 5.255111111111112e-06, - "loss": 1.5905, - "step": 47720 - }, - { - "epoch": 1.52736, - "grad_norm": 44.803035736083984, - "learning_rate": 5.251555555555557e-06, - "loss": 1.6111, - "step": 47730 - }, - { - "epoch": 1.5276800000000001, - "grad_norm": 45.717918395996094, - "learning_rate": 5.248000000000001e-06, - "loss": 1.6296, - "step": 47740 - }, - { - "epoch": 1.528, - "grad_norm": 46.95458984375, - "learning_rate": 5.244444444444445e-06, - "loss": 1.5974, - "step": 47750 - }, - { - "epoch": 1.52832, - "grad_norm": 46.433319091796875, - "learning_rate": 5.2408888888888895e-06, - "loss": 1.6004, - "step": 47760 - }, - { - "epoch": 1.52864, - "grad_norm": 45.6478271484375, - "learning_rate": 5.237333333333334e-06, - "loss": 1.6422, - "step": 47770 - }, - { - "epoch": 1.52896, - "grad_norm": 45.677242279052734, - "learning_rate": 5.233777777777779e-06, - "loss": 1.5906, - "step": 47780 - }, - { - "epoch": 1.52928, - "grad_norm": 44.30915069580078, 
- "learning_rate": 5.230222222222223e-06, - "loss": 1.5966, - "step": 47790 - }, - { - "epoch": 1.5295999999999998, - "grad_norm": 43.836761474609375, - "learning_rate": 5.226666666666667e-06, - "loss": 1.5933, - "step": 47800 - }, - { - "epoch": 1.52992, - "grad_norm": 46.55387878417969, - "learning_rate": 5.2231111111111114e-06, - "loss": 1.6344, - "step": 47810 - }, - { - "epoch": 1.53024, - "grad_norm": 44.398651123046875, - "learning_rate": 5.219555555555555e-06, - "loss": 1.5997, - "step": 47820 - }, - { - "epoch": 1.53056, - "grad_norm": 47.1364860534668, - "learning_rate": 5.216e-06, - "loss": 1.5878, - "step": 47830 - }, - { - "epoch": 1.53088, - "grad_norm": 45.02302932739258, - "learning_rate": 5.212444444444444e-06, - "loss": 1.599, - "step": 47840 - }, - { - "epoch": 1.5312000000000001, - "grad_norm": 47.26573181152344, - "learning_rate": 5.208888888888889e-06, - "loss": 1.5955, - "step": 47850 - }, - { - "epoch": 1.53152, - "grad_norm": 43.69180679321289, - "learning_rate": 5.205333333333333e-06, - "loss": 1.5938, - "step": 47860 - }, - { - "epoch": 1.5318399999999999, - "grad_norm": 43.49293899536133, - "learning_rate": 5.201777777777778e-06, - "loss": 1.5799, - "step": 47870 - }, - { - "epoch": 1.53216, - "grad_norm": 46.23622131347656, - "learning_rate": 5.1982222222222225e-06, - "loss": 1.6039, - "step": 47880 - }, - { - "epoch": 1.53248, - "grad_norm": 47.350921630859375, - "learning_rate": 5.194666666666667e-06, - "loss": 1.615, - "step": 47890 - }, - { - "epoch": 1.5328, - "grad_norm": 45.750858306884766, - "learning_rate": 5.1911111111111116e-06, - "loss": 1.5888, - "step": 47900 - }, - { - "epoch": 1.53312, - "grad_norm": 45.1241455078125, - "learning_rate": 5.187555555555555e-06, - "loss": 1.5794, - "step": 47910 - }, - { - "epoch": 1.5334400000000001, - "grad_norm": 41.951683044433594, - "learning_rate": 5.184e-06, - "loss": 1.5688, - "step": 47920 - }, - { - "epoch": 1.53376, - "grad_norm": 45.95042419433594, - "learning_rate": 5.180444444444444e-06, - "loss": 1.6301, - "step": 47930 - }, - { - "epoch": 1.5340799999999999, - "grad_norm": 46.773109436035156, - "learning_rate": 5.176888888888889e-06, - "loss": 1.5846, - "step": 47940 - }, - { - "epoch": 1.5344, - "grad_norm": 46.49460983276367, - "learning_rate": 5.1733333333333335e-06, - "loss": 1.5899, - "step": 47950 - }, - { - "epoch": 1.53472, - "grad_norm": 43.58417510986328, - "learning_rate": 5.169777777777778e-06, - "loss": 1.5804, - "step": 47960 - }, - { - "epoch": 1.53504, - "grad_norm": 47.46805953979492, - "learning_rate": 5.166222222222223e-06, - "loss": 1.6281, - "step": 47970 - }, - { - "epoch": 1.5353599999999998, - "grad_norm": 45.62743377685547, - "learning_rate": 5.162666666666667e-06, - "loss": 1.6028, - "step": 47980 - }, - { - "epoch": 1.5356800000000002, - "grad_norm": 45.07927703857422, - "learning_rate": 5.159111111111112e-06, - "loss": 1.5915, - "step": 47990 - }, - { - "epoch": 1.536, - "grad_norm": 45.753665924072266, - "learning_rate": 5.155555555555556e-06, - "loss": 1.6215, - "step": 48000 - }, - { - "epoch": 1.53632, - "grad_norm": 44.055084228515625, - "learning_rate": 5.152e-06, - "loss": 1.5835, - "step": 48010 - }, - { - "epoch": 1.53664, - "grad_norm": 44.468421936035156, - "learning_rate": 5.1484444444444445e-06, - "loss": 1.5915, - "step": 48020 - }, - { - "epoch": 1.53696, - "grad_norm": 45.8538932800293, - "learning_rate": 5.144888888888889e-06, - "loss": 1.605, - "step": 48030 - }, - { - "epoch": 1.53728, - "grad_norm": 45.57746505737305, - "learning_rate": 
5.141333333333334e-06, - "loss": 1.6134, - "step": 48040 - }, - { - "epoch": 1.5375999999999999, - "grad_norm": 46.702674865722656, - "learning_rate": 5.137777777777778e-06, - "loss": 1.5768, - "step": 48050 - }, - { - "epoch": 1.53792, - "grad_norm": 45.325279235839844, - "learning_rate": 5.134222222222223e-06, - "loss": 1.6043, - "step": 48060 - }, - { - "epoch": 1.53824, - "grad_norm": 44.36808776855469, - "learning_rate": 5.130666666666667e-06, - "loss": 1.6235, - "step": 48070 - }, - { - "epoch": 1.53856, - "grad_norm": 45.456642150878906, - "learning_rate": 5.127111111111112e-06, - "loss": 1.5894, - "step": 48080 - }, - { - "epoch": 1.53888, - "grad_norm": 45.39940643310547, - "learning_rate": 5.123555555555556e-06, - "loss": 1.5723, - "step": 48090 - }, - { - "epoch": 1.5392000000000001, - "grad_norm": 44.75956344604492, - "learning_rate": 5.12e-06, - "loss": 1.6198, - "step": 48100 - }, - { - "epoch": 1.53952, - "grad_norm": 45.92814254760742, - "learning_rate": 5.116444444444445e-06, - "loss": 1.6059, - "step": 48110 - }, - { - "epoch": 1.5398399999999999, - "grad_norm": 45.60244369506836, - "learning_rate": 5.112888888888889e-06, - "loss": 1.6104, - "step": 48120 - }, - { - "epoch": 1.54016, - "grad_norm": 44.82307052612305, - "learning_rate": 5.109333333333334e-06, - "loss": 1.6071, - "step": 48130 - }, - { - "epoch": 1.54048, - "grad_norm": 45.584922790527344, - "learning_rate": 5.105777777777778e-06, - "loss": 1.5904, - "step": 48140 - }, - { - "epoch": 1.5408, - "grad_norm": 46.74277114868164, - "learning_rate": 5.102222222222223e-06, - "loss": 1.5904, - "step": 48150 - }, - { - "epoch": 1.54112, - "grad_norm": 43.10594177246094, - "learning_rate": 5.0986666666666674e-06, - "loss": 1.565, - "step": 48160 - }, - { - "epoch": 1.5414400000000001, - "grad_norm": 45.78068161010742, - "learning_rate": 5.095111111111112e-06, - "loss": 1.6054, - "step": 48170 - }, - { - "epoch": 1.54176, - "grad_norm": 47.33755111694336, - "learning_rate": 5.0915555555555565e-06, - "loss": 1.5906, - "step": 48180 - }, - { - "epoch": 1.54208, - "grad_norm": 46.029823303222656, - "learning_rate": 5.088000000000001e-06, - "loss": 1.5908, - "step": 48190 - }, - { - "epoch": 1.5424, - "grad_norm": 45.3862419128418, - "learning_rate": 5.084444444444445e-06, - "loss": 1.6119, - "step": 48200 - }, - { - "epoch": 1.54272, - "grad_norm": 45.942222595214844, - "learning_rate": 5.080888888888889e-06, - "loss": 1.6183, - "step": 48210 - }, - { - "epoch": 1.54304, - "grad_norm": 46.743202209472656, - "learning_rate": 5.077333333333334e-06, - "loss": 1.5942, - "step": 48220 - }, - { - "epoch": 1.5433599999999998, - "grad_norm": 45.592220306396484, - "learning_rate": 5.0737777777777785e-06, - "loss": 1.579, - "step": 48230 - }, - { - "epoch": 1.5436800000000002, - "grad_norm": 43.967987060546875, - "learning_rate": 5.070222222222223e-06, - "loss": 1.5893, - "step": 48240 - }, - { - "epoch": 1.544, - "grad_norm": 45.874881744384766, - "learning_rate": 5.0666666666666676e-06, - "loss": 1.5631, - "step": 48250 - }, - { - "epoch": 1.54432, - "grad_norm": 44.52368927001953, - "learning_rate": 5.063111111111112e-06, - "loss": 1.6084, - "step": 48260 - }, - { - "epoch": 1.54464, - "grad_norm": 44.105743408203125, - "learning_rate": 5.059555555555557e-06, - "loss": 1.6041, - "step": 48270 - }, - { - "epoch": 1.54496, - "grad_norm": 45.011207580566406, - "learning_rate": 5.056000000000001e-06, - "loss": 1.6005, - "step": 48280 - }, - { - "epoch": 1.54528, - "grad_norm": 49.53215026855469, - "learning_rate": 
5.052444444444446e-06, - "loss": 1.6248, - "step": 48290 - }, - { - "epoch": 1.5455999999999999, - "grad_norm": 48.222412109375, - "learning_rate": 5.0488888888888895e-06, - "loss": 1.5842, - "step": 48300 - }, - { - "epoch": 1.54592, - "grad_norm": 43.69586944580078, - "learning_rate": 5.045333333333333e-06, - "loss": 1.6032, - "step": 48310 - }, - { - "epoch": 1.54624, - "grad_norm": 46.159278869628906, - "learning_rate": 5.041777777777778e-06, - "loss": 1.6157, - "step": 48320 - }, - { - "epoch": 1.54656, - "grad_norm": 43.497520446777344, - "learning_rate": 5.038222222222222e-06, - "loss": 1.584, - "step": 48330 - }, - { - "epoch": 1.54688, - "grad_norm": 45.150970458984375, - "learning_rate": 5.034666666666667e-06, - "loss": 1.6224, - "step": 48340 - }, - { - "epoch": 1.5472000000000001, - "grad_norm": 45.95301818847656, - "learning_rate": 5.031111111111111e-06, - "loss": 1.584, - "step": 48350 - }, - { - "epoch": 1.54752, - "grad_norm": 46.46894836425781, - "learning_rate": 5.027555555555555e-06, - "loss": 1.6119, - "step": 48360 - }, - { - "epoch": 1.5478399999999999, - "grad_norm": 49.09371566772461, - "learning_rate": 5.024e-06, - "loss": 1.6229, - "step": 48370 - }, - { - "epoch": 1.54816, - "grad_norm": 47.55086135864258, - "learning_rate": 5.020444444444444e-06, - "loss": 1.6188, - "step": 48380 - }, - { - "epoch": 1.54848, - "grad_norm": 44.514366149902344, - "learning_rate": 5.016888888888889e-06, - "loss": 1.6023, - "step": 48390 - }, - { - "epoch": 1.5488, - "grad_norm": 45.68317794799805, - "learning_rate": 5.013333333333333e-06, - "loss": 1.5956, - "step": 48400 - }, - { - "epoch": 1.54912, - "grad_norm": 47.13374328613281, - "learning_rate": 5.009777777777778e-06, - "loss": 1.5704, - "step": 48410 - }, - { - "epoch": 1.5494400000000002, - "grad_norm": 44.4520149230957, - "learning_rate": 5.0062222222222224e-06, - "loss": 1.5915, - "step": 48420 - }, - { - "epoch": 1.54976, - "grad_norm": 43.78715896606445, - "learning_rate": 5.002666666666667e-06, - "loss": 1.5887, - "step": 48430 - }, - { - "epoch": 1.55008, - "grad_norm": 48.16891098022461, - "learning_rate": 4.9991111111111115e-06, - "loss": 1.6097, - "step": 48440 - }, - { - "epoch": 1.5504, - "grad_norm": 45.72539138793945, - "learning_rate": 4.995555555555556e-06, - "loss": 1.5952, - "step": 48450 - }, - { - "epoch": 1.55072, - "grad_norm": 49.00065231323242, - "learning_rate": 4.992e-06, - "loss": 1.5906, - "step": 48460 - }, - { - "epoch": 1.55104, - "grad_norm": 45.903076171875, - "learning_rate": 4.988444444444444e-06, - "loss": 1.6303, - "step": 48470 - }, - { - "epoch": 1.5513599999999999, - "grad_norm": 45.5451774597168, - "learning_rate": 4.984888888888889e-06, - "loss": 1.5971, - "step": 48480 - }, - { - "epoch": 1.55168, - "grad_norm": 47.048728942871094, - "learning_rate": 4.9813333333333335e-06, - "loss": 1.5911, - "step": 48490 - }, - { - "epoch": 1.552, - "grad_norm": 47.47088623046875, - "learning_rate": 4.977777777777778e-06, - "loss": 1.5933, - "step": 48500 - }, - { - "epoch": 1.55232, - "grad_norm": 46.04580307006836, - "learning_rate": 4.974222222222223e-06, - "loss": 1.576, - "step": 48510 - }, - { - "epoch": 1.55264, - "grad_norm": 47.11751937866211, - "learning_rate": 4.970666666666667e-06, - "loss": 1.5979, - "step": 48520 - }, - { - "epoch": 1.5529600000000001, - "grad_norm": 46.44295883178711, - "learning_rate": 4.967111111111112e-06, - "loss": 1.6133, - "step": 48530 - }, - { - "epoch": 1.55328, - "grad_norm": 47.402000427246094, - "learning_rate": 4.963555555555556e-06, - "loss": 1.6198, 
- "step": 48540 - }, - { - "epoch": 1.5535999999999999, - "grad_norm": 45.78688430786133, - "learning_rate": 4.960000000000001e-06, - "loss": 1.5773, - "step": 48550 - }, - { - "epoch": 1.55392, - "grad_norm": 45.35833740234375, - "learning_rate": 4.9564444444444445e-06, - "loss": 1.5802, - "step": 48560 - }, - { - "epoch": 1.55424, - "grad_norm": 46.027183532714844, - "learning_rate": 4.952888888888889e-06, - "loss": 1.6121, - "step": 48570 - }, - { - "epoch": 1.55456, - "grad_norm": 46.775455474853516, - "learning_rate": 4.949333333333334e-06, - "loss": 1.6137, - "step": 48580 - }, - { - "epoch": 1.55488, - "grad_norm": 42.35320281982422, - "learning_rate": 4.945777777777778e-06, - "loss": 1.6006, - "step": 48590 - }, - { - "epoch": 1.5552000000000001, - "grad_norm": 44.261661529541016, - "learning_rate": 4.942222222222223e-06, - "loss": 1.6055, - "step": 48600 - }, - { - "epoch": 1.55552, - "grad_norm": 43.88352966308594, - "learning_rate": 4.938666666666667e-06, - "loss": 1.5861, - "step": 48610 - }, - { - "epoch": 1.55584, - "grad_norm": 46.54319381713867, - "learning_rate": 4.935111111111112e-06, - "loss": 1.587, - "step": 48620 - }, - { - "epoch": 1.55616, - "grad_norm": 43.899959564208984, - "learning_rate": 4.931555555555556e-06, - "loss": 1.5613, - "step": 48630 - }, - { - "epoch": 1.55648, - "grad_norm": 44.880645751953125, - "learning_rate": 4.928000000000001e-06, - "loss": 1.6211, - "step": 48640 - }, - { - "epoch": 1.5568, - "grad_norm": 46.75686264038086, - "learning_rate": 4.924444444444445e-06, - "loss": 1.6005, - "step": 48650 - }, - { - "epoch": 1.5571199999999998, - "grad_norm": 45.599708557128906, - "learning_rate": 4.920888888888889e-06, - "loss": 1.5848, - "step": 48660 - }, - { - "epoch": 1.5574400000000002, - "grad_norm": 47.24040222167969, - "learning_rate": 4.917333333333334e-06, - "loss": 1.5788, - "step": 48670 - }, - { - "epoch": 1.55776, - "grad_norm": 45.050682067871094, - "learning_rate": 4.913777777777778e-06, - "loss": 1.6101, - "step": 48680 - }, - { - "epoch": 1.55808, - "grad_norm": 46.12038803100586, - "learning_rate": 4.910222222222223e-06, - "loss": 1.5966, - "step": 48690 - }, - { - "epoch": 1.5584, - "grad_norm": 45.54237365722656, - "learning_rate": 4.9066666666666666e-06, - "loss": 1.5553, - "step": 48700 - }, - { - "epoch": 1.55872, - "grad_norm": 46.248050689697266, - "learning_rate": 4.903111111111111e-06, - "loss": 1.5962, - "step": 48710 - }, - { - "epoch": 1.55904, - "grad_norm": 45.845863342285156, - "learning_rate": 4.899555555555556e-06, - "loss": 1.5888, - "step": 48720 - }, - { - "epoch": 1.5593599999999999, - "grad_norm": 47.461910247802734, - "learning_rate": 4.896e-06, - "loss": 1.6009, - "step": 48730 - }, - { - "epoch": 1.55968, - "grad_norm": 46.03202819824219, - "learning_rate": 4.892444444444445e-06, - "loss": 1.5805, - "step": 48740 - }, - { - "epoch": 1.56, - "grad_norm": 48.9558219909668, - "learning_rate": 4.888888888888889e-06, - "loss": 1.5842, - "step": 48750 - }, - { - "epoch": 1.56032, - "grad_norm": 45.61824035644531, - "learning_rate": 4.885333333333334e-06, - "loss": 1.569, - "step": 48760 - }, - { - "epoch": 1.56064, - "grad_norm": 43.683650970458984, - "learning_rate": 4.8817777777777784e-06, - "loss": 1.5892, - "step": 48770 - }, - { - "epoch": 1.5609600000000001, - "grad_norm": 46.38489532470703, - "learning_rate": 4.878222222222222e-06, - "loss": 1.6174, - "step": 48780 - }, - { - "epoch": 1.56128, - "grad_norm": 45.02937316894531, - "learning_rate": 4.874666666666667e-06, - "loss": 1.5899, - "step": 48790 - 
}, - { - "epoch": 1.5615999999999999, - "grad_norm": 46.43217468261719, - "learning_rate": 4.871111111111111e-06, - "loss": 1.592, - "step": 48800 - }, - { - "epoch": 1.56192, - "grad_norm": 44.164634704589844, - "learning_rate": 4.867555555555556e-06, - "loss": 1.5769, - "step": 48810 - }, - { - "epoch": 1.56224, - "grad_norm": 46.32484817504883, - "learning_rate": 4.864e-06, - "loss": 1.5899, - "step": 48820 - }, - { - "epoch": 1.56256, - "grad_norm": 47.25856018066406, - "learning_rate": 4.860444444444445e-06, - "loss": 1.5823, - "step": 48830 - }, - { - "epoch": 1.56288, - "grad_norm": 47.051937103271484, - "learning_rate": 4.8568888888888895e-06, - "loss": 1.5783, - "step": 48840 - }, - { - "epoch": 1.5632000000000001, - "grad_norm": 43.570640563964844, - "learning_rate": 4.853333333333334e-06, - "loss": 1.5962, - "step": 48850 - }, - { - "epoch": 1.56352, - "grad_norm": 47.42235565185547, - "learning_rate": 4.8497777777777786e-06, - "loss": 1.5896, - "step": 48860 - }, - { - "epoch": 1.56384, - "grad_norm": 45.89921951293945, - "learning_rate": 4.846222222222223e-06, - "loss": 1.5705, - "step": 48870 - }, - { - "epoch": 1.56416, - "grad_norm": 44.323524475097656, - "learning_rate": 4.842666666666667e-06, - "loss": 1.5953, - "step": 48880 - }, - { - "epoch": 1.56448, - "grad_norm": 45.725799560546875, - "learning_rate": 4.839111111111111e-06, - "loss": 1.5988, - "step": 48890 - }, - { - "epoch": 1.5648, - "grad_norm": 44.503028869628906, - "learning_rate": 4.835555555555556e-06, - "loss": 1.5833, - "step": 48900 - }, - { - "epoch": 1.5651199999999998, - "grad_norm": 44.74811553955078, - "learning_rate": 4.8320000000000005e-06, - "loss": 1.6001, - "step": 48910 - }, - { - "epoch": 1.5654400000000002, - "grad_norm": 46.46488952636719, - "learning_rate": 4.828444444444445e-06, - "loss": 1.5868, - "step": 48920 - }, - { - "epoch": 1.56576, - "grad_norm": 44.30849838256836, - "learning_rate": 4.82488888888889e-06, - "loss": 1.5965, - "step": 48930 - }, - { - "epoch": 1.56608, - "grad_norm": 45.116268157958984, - "learning_rate": 4.821333333333334e-06, - "loss": 1.6137, - "step": 48940 - }, - { - "epoch": 1.5664, - "grad_norm": 44.71920394897461, - "learning_rate": 4.817777777777779e-06, - "loss": 1.5803, - "step": 48950 - }, - { - "epoch": 1.5667200000000001, - "grad_norm": 45.48208999633789, - "learning_rate": 4.814222222222222e-06, - "loss": 1.5986, - "step": 48960 - }, - { - "epoch": 1.56704, - "grad_norm": 47.023948669433594, - "learning_rate": 4.810666666666667e-06, - "loss": 1.6032, - "step": 48970 - }, - { - "epoch": 1.5673599999999999, - "grad_norm": 46.040283203125, - "learning_rate": 4.8071111111111115e-06, - "loss": 1.5844, - "step": 48980 - }, - { - "epoch": 1.56768, - "grad_norm": 44.496978759765625, - "learning_rate": 4.803555555555556e-06, - "loss": 1.5661, - "step": 48990 - }, - { - "epoch": 1.568, - "grad_norm": 45.307952880859375, - "learning_rate": 4.800000000000001e-06, - "loss": 1.5877, - "step": 49000 - }, - { - "epoch": 1.56832, - "grad_norm": 45.56713104248047, - "learning_rate": 4.796444444444444e-06, - "loss": 1.5885, - "step": 49010 - }, - { - "epoch": 1.56864, - "grad_norm": 44.79756546020508, - "learning_rate": 4.792888888888889e-06, - "loss": 1.5636, - "step": 49020 - }, - { - "epoch": 1.5689600000000001, - "grad_norm": 45.48712921142578, - "learning_rate": 4.7893333333333334e-06, - "loss": 1.6004, - "step": 49030 - }, - { - "epoch": 1.56928, - "grad_norm": 46.09973907470703, - "learning_rate": 4.785777777777778e-06, - "loss": 1.5947, - "step": 49040 - }, - { 
- "epoch": 1.5695999999999999, - "grad_norm": 45.13676834106445, - "learning_rate": 4.7822222222222226e-06, - "loss": 1.5717, - "step": 49050 - }, - { - "epoch": 1.56992, - "grad_norm": 47.738285064697266, - "learning_rate": 4.778666666666667e-06, - "loss": 1.6115, - "step": 49060 - }, - { - "epoch": 1.57024, - "grad_norm": 44.058349609375, - "learning_rate": 4.775111111111112e-06, - "loss": 1.5722, - "step": 49070 - }, - { - "epoch": 1.57056, - "grad_norm": 46.16585159301758, - "learning_rate": 4.771555555555556e-06, - "loss": 1.6029, - "step": 49080 - }, - { - "epoch": 1.57088, - "grad_norm": 46.44235610961914, - "learning_rate": 4.768000000000001e-06, - "loss": 1.5949, - "step": 49090 - }, - { - "epoch": 1.5712000000000002, - "grad_norm": 45.075191497802734, - "learning_rate": 4.7644444444444445e-06, - "loss": 1.6002, - "step": 49100 - }, - { - "epoch": 1.57152, - "grad_norm": 46.433570861816406, - "learning_rate": 4.760888888888889e-06, - "loss": 1.5938, - "step": 49110 - }, - { - "epoch": 1.57184, - "grad_norm": 43.26811981201172, - "learning_rate": 4.757333333333334e-06, - "loss": 1.5908, - "step": 49120 - }, - { - "epoch": 1.57216, - "grad_norm": 45.4081916809082, - "learning_rate": 4.753777777777778e-06, - "loss": 1.5939, - "step": 49130 - }, - { - "epoch": 1.57248, - "grad_norm": 47.71382141113281, - "learning_rate": 4.750222222222223e-06, - "loss": 1.5864, - "step": 49140 - }, - { - "epoch": 1.5728, - "grad_norm": 44.84182357788086, - "learning_rate": 4.746666666666667e-06, - "loss": 1.6356, - "step": 49150 - }, - { - "epoch": 1.5731199999999999, - "grad_norm": 46.003990173339844, - "learning_rate": 4.743111111111112e-06, - "loss": 1.6466, - "step": 49160 - }, - { - "epoch": 1.57344, - "grad_norm": 44.50692367553711, - "learning_rate": 4.739555555555556e-06, - "loss": 1.6076, - "step": 49170 - }, - { - "epoch": 1.57376, - "grad_norm": 47.03111267089844, - "learning_rate": 4.736000000000001e-06, - "loss": 1.5869, - "step": 49180 - }, - { - "epoch": 1.57408, - "grad_norm": 46.598873138427734, - "learning_rate": 4.7324444444444455e-06, - "loss": 1.5847, - "step": 49190 - }, - { - "epoch": 1.5744, - "grad_norm": 46.089786529541016, - "learning_rate": 4.728888888888889e-06, - "loss": 1.5767, - "step": 49200 - }, - { - "epoch": 1.5747200000000001, - "grad_norm": 45.16957092285156, - "learning_rate": 4.725333333333334e-06, - "loss": 1.5933, - "step": 49210 - }, - { - "epoch": 1.57504, - "grad_norm": 45.54878234863281, - "learning_rate": 4.721777777777778e-06, - "loss": 1.5773, - "step": 49220 - }, - { - "epoch": 1.5753599999999999, - "grad_norm": 46.04212188720703, - "learning_rate": 4.718222222222222e-06, - "loss": 1.5895, - "step": 49230 - }, - { - "epoch": 1.57568, - "grad_norm": 46.73760986328125, - "learning_rate": 4.7146666666666665e-06, - "loss": 1.6408, - "step": 49240 - }, - { - "epoch": 1.576, - "grad_norm": 47.090755462646484, - "learning_rate": 4.711111111111111e-06, - "loss": 1.623, - "step": 49250 - }, - { - "epoch": 1.57632, - "grad_norm": 46.1207160949707, - "learning_rate": 4.707555555555556e-06, - "loss": 1.5815, - "step": 49260 - }, - { - "epoch": 1.57664, - "grad_norm": 47.64818572998047, - "learning_rate": 4.704e-06, - "loss": 1.597, - "step": 49270 - }, - { - "epoch": 1.5769600000000001, - "grad_norm": 46.952392578125, - "learning_rate": 4.700444444444445e-06, - "loss": 1.5954, - "step": 49280 - }, - { - "epoch": 1.57728, - "grad_norm": 46.08356475830078, - "learning_rate": 4.696888888888889e-06, - "loss": 1.5949, - "step": 49290 - }, - { - "epoch": 1.5776, - 
"grad_norm": 45.83845901489258, - "learning_rate": 4.693333333333334e-06, - "loss": 1.6158, - "step": 49300 - }, - { - "epoch": 1.57792, - "grad_norm": 45.37620162963867, - "learning_rate": 4.689777777777778e-06, - "loss": 1.5881, - "step": 49310 - }, - { - "epoch": 1.57824, - "grad_norm": 44.12497329711914, - "learning_rate": 4.686222222222223e-06, - "loss": 1.6028, - "step": 49320 - }, - { - "epoch": 1.57856, - "grad_norm": 46.23371124267578, - "learning_rate": 4.682666666666667e-06, - "loss": 1.6129, - "step": 49330 - }, - { - "epoch": 1.5788799999999998, - "grad_norm": 46.70281982421875, - "learning_rate": 4.679111111111111e-06, - "loss": 1.6013, - "step": 49340 - }, - { - "epoch": 1.5792000000000002, - "grad_norm": 46.354454040527344, - "learning_rate": 4.675555555555556e-06, - "loss": 1.5801, - "step": 49350 - }, - { - "epoch": 1.57952, - "grad_norm": 44.693077087402344, - "learning_rate": 4.672e-06, - "loss": 1.6157, - "step": 49360 - }, - { - "epoch": 1.57984, - "grad_norm": 45.19998550415039, - "learning_rate": 4.668444444444445e-06, - "loss": 1.5855, - "step": 49370 - }, - { - "epoch": 1.58016, - "grad_norm": 43.8433837890625, - "learning_rate": 4.6648888888888894e-06, - "loss": 1.6182, - "step": 49380 - }, - { - "epoch": 1.58048, - "grad_norm": 44.49872589111328, - "learning_rate": 4.661333333333334e-06, - "loss": 1.6011, - "step": 49390 - }, - { - "epoch": 1.5808, - "grad_norm": 45.483192443847656, - "learning_rate": 4.6577777777777785e-06, - "loss": 1.5845, - "step": 49400 - }, - { - "epoch": 1.5811199999999999, - "grad_norm": 44.29515838623047, - "learning_rate": 4.654222222222223e-06, - "loss": 1.6335, - "step": 49410 - }, - { - "epoch": 1.58144, - "grad_norm": 43.83500671386719, - "learning_rate": 4.650666666666667e-06, - "loss": 1.6099, - "step": 49420 - }, - { - "epoch": 1.58176, - "grad_norm": 46.206172943115234, - "learning_rate": 4.647111111111111e-06, - "loss": 1.6153, - "step": 49430 - }, - { - "epoch": 1.58208, - "grad_norm": 47.253238677978516, - "learning_rate": 4.643555555555556e-06, - "loss": 1.6045, - "step": 49440 - }, - { - "epoch": 1.5824, - "grad_norm": 46.3438835144043, - "learning_rate": 4.6400000000000005e-06, - "loss": 1.5862, - "step": 49450 - }, - { - "epoch": 1.5827200000000001, - "grad_norm": 46.675289154052734, - "learning_rate": 4.636444444444445e-06, - "loss": 1.5973, - "step": 49460 - }, - { - "epoch": 1.58304, - "grad_norm": 46.53513717651367, - "learning_rate": 4.632888888888889e-06, - "loss": 1.6053, - "step": 49470 - }, - { - "epoch": 1.5833599999999999, - "grad_norm": 44.555091857910156, - "learning_rate": 4.629333333333333e-06, - "loss": 1.6145, - "step": 49480 - }, - { - "epoch": 1.58368, - "grad_norm": 47.14988327026367, - "learning_rate": 4.625777777777778e-06, - "loss": 1.5914, - "step": 49490 - }, - { - "epoch": 1.584, - "grad_norm": 47.16956329345703, - "learning_rate": 4.622222222222222e-06, - "loss": 1.582, - "step": 49500 - }, - { - "epoch": 1.58432, - "grad_norm": 45.73616409301758, - "learning_rate": 4.618666666666667e-06, - "loss": 1.5876, - "step": 49510 - }, - { - "epoch": 1.58464, - "grad_norm": 47.80562210083008, - "learning_rate": 4.6151111111111115e-06, - "loss": 1.5855, - "step": 49520 - }, - { - "epoch": 1.5849600000000001, - "grad_norm": 45.63853454589844, - "learning_rate": 4.611555555555556e-06, - "loss": 1.5892, - "step": 49530 - }, - { - "epoch": 1.58528, - "grad_norm": 46.739627838134766, - "learning_rate": 4.608000000000001e-06, - "loss": 1.5997, - "step": 49540 - }, - { - "epoch": 1.5856, - "grad_norm": 
46.625587463378906, - "learning_rate": 4.604444444444444e-06, - "loss": 1.586, - "step": 49550 - }, - { - "epoch": 1.58592, - "grad_norm": 47.605796813964844, - "learning_rate": 4.600888888888889e-06, - "loss": 1.6017, - "step": 49560 - }, - { - "epoch": 1.58624, - "grad_norm": 45.937286376953125, - "learning_rate": 4.597333333333333e-06, - "loss": 1.5918, - "step": 49570 - }, - { - "epoch": 1.58656, - "grad_norm": 44.786460876464844, - "learning_rate": 4.593777777777778e-06, - "loss": 1.6187, - "step": 49580 - }, - { - "epoch": 1.5868799999999998, - "grad_norm": 44.00181579589844, - "learning_rate": 4.5902222222222225e-06, - "loss": 1.597, - "step": 49590 - }, - { - "epoch": 1.5872000000000002, - "grad_norm": 45.921409606933594, - "learning_rate": 4.586666666666667e-06, - "loss": 1.6047, - "step": 49600 - }, - { - "epoch": 1.58752, - "grad_norm": 44.386146545410156, - "learning_rate": 4.583111111111112e-06, - "loss": 1.5841, - "step": 49610 - }, - { - "epoch": 1.58784, - "grad_norm": 44.642608642578125, - "learning_rate": 4.579555555555556e-06, - "loss": 1.5969, - "step": 49620 - }, - { - "epoch": 1.58816, - "grad_norm": 45.06999588012695, - "learning_rate": 4.576000000000001e-06, - "loss": 1.6094, - "step": 49630 - }, - { - "epoch": 1.5884800000000001, - "grad_norm": 45.32560729980469, - "learning_rate": 4.572444444444445e-06, - "loss": 1.5804, - "step": 49640 - }, - { - "epoch": 1.5888, - "grad_norm": 47.215904235839844, - "learning_rate": 4.568888888888889e-06, - "loss": 1.622, - "step": 49650 - }, - { - "epoch": 1.5891199999999999, - "grad_norm": 45.09397888183594, - "learning_rate": 4.5653333333333336e-06, - "loss": 1.6058, - "step": 49660 - }, - { - "epoch": 1.58944, - "grad_norm": 47.4116325378418, - "learning_rate": 4.561777777777778e-06, - "loss": 1.6086, - "step": 49670 - }, - { - "epoch": 1.58976, - "grad_norm": 44.43334197998047, - "learning_rate": 4.558222222222223e-06, - "loss": 1.63, - "step": 49680 - }, - { - "epoch": 1.59008, - "grad_norm": 44.518882751464844, - "learning_rate": 4.554666666666667e-06, - "loss": 1.6114, - "step": 49690 - }, - { - "epoch": 1.5904, - "grad_norm": 45.74620056152344, - "learning_rate": 4.551111111111112e-06, - "loss": 1.5886, - "step": 49700 - }, - { - "epoch": 1.5907200000000001, - "grad_norm": 45.82404327392578, - "learning_rate": 4.547555555555556e-06, - "loss": 1.6022, - "step": 49710 - }, - { - "epoch": 1.59104, - "grad_norm": 47.253089904785156, - "learning_rate": 4.544000000000001e-06, - "loss": 1.5895, - "step": 49720 - }, - { - "epoch": 1.5913599999999999, - "grad_norm": 46.178253173828125, - "learning_rate": 4.540444444444445e-06, - "loss": 1.5902, - "step": 49730 - }, - { - "epoch": 1.59168, - "grad_norm": 46.93673324584961, - "learning_rate": 4.536888888888889e-06, - "loss": 1.6254, - "step": 49740 - }, - { - "epoch": 1.592, - "grad_norm": 46.61408996582031, - "learning_rate": 4.533333333333334e-06, - "loss": 1.6315, - "step": 49750 - }, - { - "epoch": 1.59232, - "grad_norm": 46.882930755615234, - "learning_rate": 4.529777777777778e-06, - "loss": 1.6253, - "step": 49760 - }, - { - "epoch": 1.5926399999999998, - "grad_norm": 47.14992141723633, - "learning_rate": 4.526222222222223e-06, - "loss": 1.5998, - "step": 49770 - }, - { - "epoch": 1.5929600000000002, - "grad_norm": 46.04390335083008, - "learning_rate": 4.5226666666666665e-06, - "loss": 1.5912, - "step": 49780 - }, - { - "epoch": 1.59328, - "grad_norm": 43.933555603027344, - "learning_rate": 4.519111111111111e-06, - "loss": 1.5897, - "step": 49790 - }, - { - "epoch": 1.5936, - 
"grad_norm": 48.31332015991211, - "learning_rate": 4.515555555555556e-06, - "loss": 1.5821, - "step": 49800 - }, - { - "epoch": 1.59392, - "grad_norm": 43.75424575805664, - "learning_rate": 4.512e-06, - "loss": 1.6014, - "step": 49810 - }, - { - "epoch": 1.59424, - "grad_norm": 45.77436447143555, - "learning_rate": 4.508444444444445e-06, - "loss": 1.6042, - "step": 49820 - }, - { - "epoch": 1.59456, - "grad_norm": 45.634220123291016, - "learning_rate": 4.504888888888889e-06, - "loss": 1.6119, - "step": 49830 - }, - { - "epoch": 1.5948799999999999, - "grad_norm": 45.671112060546875, - "learning_rate": 4.501333333333334e-06, - "loss": 1.6364, - "step": 49840 - }, - { - "epoch": 1.5952, - "grad_norm": 45.11393356323242, - "learning_rate": 4.497777777777778e-06, - "loss": 1.5874, - "step": 49850 - }, - { - "epoch": 1.59552, - "grad_norm": 46.57709884643555, - "learning_rate": 4.494222222222223e-06, - "loss": 1.5769, - "step": 49860 - }, - { - "epoch": 1.59584, - "grad_norm": 45.363006591796875, - "learning_rate": 4.490666666666667e-06, - "loss": 1.5677, - "step": 49870 - }, - { - "epoch": 1.59616, - "grad_norm": 45.681785583496094, - "learning_rate": 4.487111111111111e-06, - "loss": 1.5862, - "step": 49880 - }, - { - "epoch": 1.5964800000000001, - "grad_norm": 47.11852264404297, - "learning_rate": 4.483555555555556e-06, - "loss": 1.585, - "step": 49890 - }, - { - "epoch": 1.5968, - "grad_norm": 45.29941940307617, - "learning_rate": 4.48e-06, - "loss": 1.585, - "step": 49900 - }, - { - "epoch": 1.5971199999999999, - "grad_norm": 43.873050689697266, - "learning_rate": 4.476444444444445e-06, - "loss": 1.6232, - "step": 49910 - }, - { - "epoch": 1.59744, - "grad_norm": 44.94419860839844, - "learning_rate": 4.472888888888889e-06, - "loss": 1.6314, - "step": 49920 - }, - { - "epoch": 1.59776, - "grad_norm": 44.24449920654297, - "learning_rate": 4.469333333333334e-06, - "loss": 1.6162, - "step": 49930 - }, - { - "epoch": 1.59808, - "grad_norm": 44.8932991027832, - "learning_rate": 4.4657777777777785e-06, - "loss": 1.5827, - "step": 49940 - }, - { - "epoch": 1.5984, - "grad_norm": 43.619686126708984, - "learning_rate": 4.462222222222223e-06, - "loss": 1.5784, - "step": 49950 - }, - { - "epoch": 1.5987200000000001, - "grad_norm": 46.18088150024414, - "learning_rate": 4.458666666666668e-06, - "loss": 1.6026, - "step": 49960 - }, - { - "epoch": 1.59904, - "grad_norm": 59.165828704833984, - "learning_rate": 4.455111111111111e-06, - "loss": 1.6044, - "step": 49970 - }, - { - "epoch": 1.59936, - "grad_norm": 44.62675094604492, - "learning_rate": 4.451555555555556e-06, - "loss": 1.6104, - "step": 49980 - }, - { - "epoch": 1.59968, - "grad_norm": 45.41344451904297, - "learning_rate": 4.4480000000000004e-06, - "loss": 1.5886, - "step": 49990 - }, - { - "epoch": 1.6, - "grad_norm": 45.50170135498047, - "learning_rate": 4.444444444444444e-06, - "loss": 1.5892, - "step": 50000 - }, - { - "epoch": 1.60032, - "grad_norm": 45.556915283203125, - "learning_rate": 4.440888888888889e-06, - "loss": 1.5747, - "step": 50010 - }, - { - "epoch": 1.6006399999999998, - "grad_norm": 43.671234130859375, - "learning_rate": 4.437333333333333e-06, - "loss": 1.6092, - "step": 50020 - }, - { - "epoch": 1.6009600000000002, - "grad_norm": 45.89202117919922, - "learning_rate": 4.433777777777778e-06, - "loss": 1.585, - "step": 50030 - }, - { - "epoch": 1.60128, - "grad_norm": 44.5806770324707, - "learning_rate": 4.430222222222222e-06, - "loss": 1.6141, - "step": 50040 - }, - { - "epoch": 1.6016, - "grad_norm": 47.617652893066406, - 
"learning_rate": 4.426666666666667e-06, - "loss": 1.5834, - "step": 50050 - }, - { - "epoch": 1.60192, - "grad_norm": 45.94891357421875, - "learning_rate": 4.4231111111111115e-06, - "loss": 1.5604, - "step": 50060 - }, - { - "epoch": 1.60224, - "grad_norm": 46.36294937133789, - "learning_rate": 4.419555555555556e-06, - "loss": 1.581, - "step": 50070 - }, - { - "epoch": 1.60256, - "grad_norm": 44.878177642822266, - "learning_rate": 4.416000000000001e-06, - "loss": 1.5884, - "step": 50080 - }, - { - "epoch": 1.6028799999999999, - "grad_norm": 46.650146484375, - "learning_rate": 4.412444444444445e-06, - "loss": 1.59, - "step": 50090 - }, - { - "epoch": 1.6032, - "grad_norm": 44.59064483642578, - "learning_rate": 4.408888888888889e-06, - "loss": 1.5989, - "step": 50100 - }, - { - "epoch": 1.60352, - "grad_norm": 45.031578063964844, - "learning_rate": 4.405333333333333e-06, - "loss": 1.5923, - "step": 50110 - }, - { - "epoch": 1.60384, - "grad_norm": 44.28569793701172, - "learning_rate": 4.401777777777778e-06, - "loss": 1.6092, - "step": 50120 - }, - { - "epoch": 1.60416, - "grad_norm": 44.284339904785156, - "learning_rate": 4.3982222222222225e-06, - "loss": 1.6016, - "step": 50130 - }, - { - "epoch": 1.6044800000000001, - "grad_norm": 44.18607711791992, - "learning_rate": 4.394666666666667e-06, - "loss": 1.6137, - "step": 50140 - }, - { - "epoch": 1.6048, - "grad_norm": 45.102230072021484, - "learning_rate": 4.391111111111112e-06, - "loss": 1.5995, - "step": 50150 - }, - { - "epoch": 1.6051199999999999, - "grad_norm": 44.339927673339844, - "learning_rate": 4.387555555555556e-06, - "loss": 1.6157, - "step": 50160 - }, - { - "epoch": 1.60544, - "grad_norm": 45.931880950927734, - "learning_rate": 4.384000000000001e-06, - "loss": 1.5997, - "step": 50170 - }, - { - "epoch": 1.60576, - "grad_norm": 44.674312591552734, - "learning_rate": 4.380444444444445e-06, - "loss": 1.6045, - "step": 50180 - }, - { - "epoch": 1.60608, - "grad_norm": 47.0339241027832, - "learning_rate": 4.37688888888889e-06, - "loss": 1.6022, - "step": 50190 - }, - { - "epoch": 1.6064, - "grad_norm": 49.61772155761719, - "learning_rate": 4.3733333333333335e-06, - "loss": 1.583, - "step": 50200 - }, - { - "epoch": 1.6067200000000001, - "grad_norm": 46.44553756713867, - "learning_rate": 4.369777777777778e-06, - "loss": 1.5689, - "step": 50210 - }, - { - "epoch": 1.60704, - "grad_norm": 44.75165939331055, - "learning_rate": 4.366222222222223e-06, - "loss": 1.5962, - "step": 50220 - }, - { - "epoch": 1.60736, - "grad_norm": 46.96379470825195, - "learning_rate": 4.362666666666667e-06, - "loss": 1.5637, - "step": 50230 - }, - { - "epoch": 1.60768, - "grad_norm": 44.98953628540039, - "learning_rate": 4.359111111111112e-06, - "loss": 1.5939, - "step": 50240 - }, - { - "epoch": 1.608, - "grad_norm": 46.01952362060547, - "learning_rate": 4.3555555555555555e-06, - "loss": 1.6029, - "step": 50250 - }, - { - "epoch": 1.60832, - "grad_norm": 44.32866287231445, - "learning_rate": 4.352e-06, - "loss": 1.5855, - "step": 50260 - }, - { - "epoch": 1.6086399999999998, - "grad_norm": 47.755096435546875, - "learning_rate": 4.3484444444444446e-06, - "loss": 1.6094, - "step": 50270 - }, - { - "epoch": 1.60896, - "grad_norm": 44.24766159057617, - "learning_rate": 4.344888888888889e-06, - "loss": 1.5795, - "step": 50280 - }, - { - "epoch": 1.60928, - "grad_norm": 45.763004302978516, - "learning_rate": 4.341333333333334e-06, - "loss": 1.6071, - "step": 50290 - }, - { - "epoch": 1.6096, - "grad_norm": 45.684059143066406, - "learning_rate": 
4.337777777777778e-06, - "loss": 1.619, - "step": 50300 - }, - { - "epoch": 1.60992, - "grad_norm": 44.89235305786133, - "learning_rate": 4.334222222222223e-06, - "loss": 1.563, - "step": 50310 - }, - { - "epoch": 1.6102400000000001, - "grad_norm": 47.123172760009766, - "learning_rate": 4.330666666666667e-06, - "loss": 1.6095, - "step": 50320 - }, - { - "epoch": 1.61056, - "grad_norm": 47.982330322265625, - "learning_rate": 4.327111111111111e-06, - "loss": 1.5968, - "step": 50330 - }, - { - "epoch": 1.6108799999999999, - "grad_norm": 44.019073486328125, - "learning_rate": 4.323555555555556e-06, - "loss": 1.5747, - "step": 50340 - }, - { - "epoch": 1.6112, - "grad_norm": 44.900474548339844, - "learning_rate": 4.32e-06, - "loss": 1.5865, - "step": 50350 - }, - { - "epoch": 1.61152, - "grad_norm": 46.74665451049805, - "learning_rate": 4.316444444444445e-06, - "loss": 1.5976, - "step": 50360 - }, - { - "epoch": 1.61184, - "grad_norm": 45.36431884765625, - "learning_rate": 4.312888888888889e-06, - "loss": 1.5888, - "step": 50370 - }, - { - "epoch": 1.61216, - "grad_norm": 44.3800163269043, - "learning_rate": 4.309333333333334e-06, - "loss": 1.6082, - "step": 50380 - }, - { - "epoch": 1.6124800000000001, - "grad_norm": 45.327423095703125, - "learning_rate": 4.305777777777778e-06, - "loss": 1.5848, - "step": 50390 - }, - { - "epoch": 1.6128, - "grad_norm": 46.212669372558594, - "learning_rate": 4.302222222222223e-06, - "loss": 1.6142, - "step": 50400 - }, - { - "epoch": 1.6131199999999999, - "grad_norm": 47.48764419555664, - "learning_rate": 4.2986666666666675e-06, - "loss": 1.6003, - "step": 50410 - }, - { - "epoch": 1.61344, - "grad_norm": 47.0029182434082, - "learning_rate": 4.295111111111111e-06, - "loss": 1.581, - "step": 50420 - }, - { - "epoch": 1.61376, - "grad_norm": 44.517669677734375, - "learning_rate": 4.291555555555556e-06, - "loss": 1.5755, - "step": 50430 - }, - { - "epoch": 1.61408, - "grad_norm": 44.641807556152344, - "learning_rate": 4.288e-06, - "loss": 1.6121, - "step": 50440 - }, - { - "epoch": 1.6143999999999998, - "grad_norm": 46.77535629272461, - "learning_rate": 4.284444444444445e-06, - "loss": 1.6039, - "step": 50450 - }, - { - "epoch": 1.6147200000000002, - "grad_norm": 47.83342742919922, - "learning_rate": 4.280888888888889e-06, - "loss": 1.5909, - "step": 50460 - }, - { - "epoch": 1.61504, - "grad_norm": 46.20832061767578, - "learning_rate": 4.277333333333334e-06, - "loss": 1.6097, - "step": 50470 - }, - { - "epoch": 1.61536, - "grad_norm": 46.99225616455078, - "learning_rate": 4.2737777777777785e-06, - "loss": 1.5748, - "step": 50480 - }, - { - "epoch": 1.61568, - "grad_norm": 48.268760681152344, - "learning_rate": 4.270222222222223e-06, - "loss": 1.5669, - "step": 50490 - }, - { - "epoch": 1.616, - "grad_norm": 45.4713249206543, - "learning_rate": 4.266666666666668e-06, - "loss": 1.6042, - "step": 50500 - }, - { - "epoch": 1.61632, - "grad_norm": 46.854278564453125, - "learning_rate": 4.263111111111111e-06, - "loss": 1.6017, - "step": 50510 - }, - { - "epoch": 1.6166399999999999, - "grad_norm": 45.30042266845703, - "learning_rate": 4.259555555555556e-06, - "loss": 1.6155, - "step": 50520 - }, - { - "epoch": 1.61696, - "grad_norm": 45.22309112548828, - "learning_rate": 4.256e-06, - "loss": 1.5923, - "step": 50530 - }, - { - "epoch": 1.61728, - "grad_norm": 45.24235916137695, - "learning_rate": 4.252444444444445e-06, - "loss": 1.5976, - "step": 50540 - }, - { - "epoch": 1.6176, - "grad_norm": 44.514007568359375, - "learning_rate": 4.248888888888889e-06, - "loss": 
1.6064, - "step": 50550 - }, - { - "epoch": 1.61792, - "grad_norm": 44.3435173034668, - "learning_rate": 4.245333333333333e-06, - "loss": 1.6048, - "step": 50560 - }, - { - "epoch": 1.6182400000000001, - "grad_norm": 44.056148529052734, - "learning_rate": 4.241777777777778e-06, - "loss": 1.5859, - "step": 50570 - }, - { - "epoch": 1.61856, - "grad_norm": 46.236839294433594, - "learning_rate": 4.238222222222222e-06, - "loss": 1.5929, - "step": 50580 - }, - { - "epoch": 1.6188799999999999, - "grad_norm": 43.43770980834961, - "learning_rate": 4.234666666666667e-06, - "loss": 1.5758, - "step": 50590 - }, - { - "epoch": 1.6192, - "grad_norm": 46.04836654663086, - "learning_rate": 4.2311111111111114e-06, - "loss": 1.6049, - "step": 50600 - }, - { - "epoch": 1.61952, - "grad_norm": 45.94651794433594, - "learning_rate": 4.227555555555556e-06, - "loss": 1.5727, - "step": 50610 - }, - { - "epoch": 1.61984, - "grad_norm": 46.23525619506836, - "learning_rate": 4.2240000000000006e-06, - "loss": 1.5732, - "step": 50620 - }, - { - "epoch": 1.62016, - "grad_norm": 46.8028678894043, - "learning_rate": 4.220444444444445e-06, - "loss": 1.5841, - "step": 50630 - }, - { - "epoch": 1.6204800000000001, - "grad_norm": 45.00838851928711, - "learning_rate": 4.21688888888889e-06, - "loss": 1.5685, - "step": 50640 - }, - { - "epoch": 1.6208, - "grad_norm": 44.92591857910156, - "learning_rate": 4.213333333333333e-06, - "loss": 1.5978, - "step": 50650 - }, - { - "epoch": 1.62112, - "grad_norm": 45.20899963378906, - "learning_rate": 4.209777777777778e-06, - "loss": 1.5992, - "step": 50660 - }, - { - "epoch": 1.62144, - "grad_norm": 46.38852310180664, - "learning_rate": 4.2062222222222225e-06, - "loss": 1.5855, - "step": 50670 - }, - { - "epoch": 1.62176, - "grad_norm": 42.88059997558594, - "learning_rate": 4.202666666666667e-06, - "loss": 1.5763, - "step": 50680 - }, - { - "epoch": 1.62208, - "grad_norm": 46.491859436035156, - "learning_rate": 4.199111111111112e-06, - "loss": 1.5542, - "step": 50690 - }, - { - "epoch": 1.6223999999999998, - "grad_norm": 46.772457122802734, - "learning_rate": 4.195555555555556e-06, - "loss": 1.5973, - "step": 50700 - }, - { - "epoch": 1.6227200000000002, - "grad_norm": 45.93109893798828, - "learning_rate": 4.192000000000001e-06, - "loss": 1.6171, - "step": 50710 - }, - { - "epoch": 1.62304, - "grad_norm": 46.431365966796875, - "learning_rate": 4.188444444444445e-06, - "loss": 1.5954, - "step": 50720 - }, - { - "epoch": 1.62336, - "grad_norm": 47.1487922668457, - "learning_rate": 4.18488888888889e-06, - "loss": 1.6097, - "step": 50730 - }, - { - "epoch": 1.62368, - "grad_norm": 44.51396179199219, - "learning_rate": 4.1813333333333335e-06, - "loss": 1.6403, - "step": 50740 - }, - { - "epoch": 1.624, - "grad_norm": 45.483638763427734, - "learning_rate": 4.177777777777778e-06, - "loss": 1.622, - "step": 50750 - }, - { - "epoch": 1.62432, - "grad_norm": 46.74037551879883, - "learning_rate": 4.174222222222223e-06, - "loss": 1.5975, - "step": 50760 - }, - { - "epoch": 1.6246399999999999, - "grad_norm": 45.22315979003906, - "learning_rate": 4.170666666666667e-06, - "loss": 1.5898, - "step": 50770 - }, - { - "epoch": 1.62496, - "grad_norm": 47.88893508911133, - "learning_rate": 4.167111111111111e-06, - "loss": 1.5861, - "step": 50780 - }, - { - "epoch": 1.62528, - "grad_norm": 47.07771682739258, - "learning_rate": 4.1635555555555554e-06, - "loss": 1.5988, - "step": 50790 - }, - { - "epoch": 1.6256, - "grad_norm": 46.006805419921875, - "learning_rate": 4.16e-06, - "loss": 1.6015, - "step": 50800 - 
}, - { - "epoch": 1.62592, - "grad_norm": 45.821189880371094, - "learning_rate": 4.1564444444444445e-06, - "loss": 1.5748, - "step": 50810 - }, - { - "epoch": 1.6262400000000001, - "grad_norm": 44.78703308105469, - "learning_rate": 4.152888888888889e-06, - "loss": 1.6025, - "step": 50820 - }, - { - "epoch": 1.62656, - "grad_norm": 46.905548095703125, - "learning_rate": 4.149333333333334e-06, - "loss": 1.5806, - "step": 50830 - }, - { - "epoch": 1.6268799999999999, - "grad_norm": 45.533382415771484, - "learning_rate": 4.145777777777778e-06, - "loss": 1.5724, - "step": 50840 - }, - { - "epoch": 1.6272, - "grad_norm": 46.332847595214844, - "learning_rate": 4.142222222222223e-06, - "loss": 1.6117, - "step": 50850 - }, - { - "epoch": 1.62752, - "grad_norm": 47.08037567138672, - "learning_rate": 4.138666666666667e-06, - "loss": 1.6313, - "step": 50860 - }, - { - "epoch": 1.62784, - "grad_norm": 45.778968811035156, - "learning_rate": 4.135111111111111e-06, - "loss": 1.5783, - "step": 50870 - }, - { - "epoch": 1.62816, - "grad_norm": 46.12378692626953, - "learning_rate": 4.1315555555555556e-06, - "loss": 1.5866, - "step": 50880 - }, - { - "epoch": 1.6284800000000001, - "grad_norm": 46.11397933959961, - "learning_rate": 4.128e-06, - "loss": 1.5727, - "step": 50890 - }, - { - "epoch": 1.6288, - "grad_norm": 45.41218185424805, - "learning_rate": 4.124444444444445e-06, - "loss": 1.6054, - "step": 50900 - }, - { - "epoch": 1.62912, - "grad_norm": 44.79296875, - "learning_rate": 4.120888888888889e-06, - "loss": 1.5776, - "step": 50910 - }, - { - "epoch": 1.62944, - "grad_norm": 46.6500129699707, - "learning_rate": 4.117333333333334e-06, - "loss": 1.5955, - "step": 50920 - }, - { - "epoch": 1.62976, - "grad_norm": 44.73964309692383, - "learning_rate": 4.113777777777778e-06, - "loss": 1.6007, - "step": 50930 - }, - { - "epoch": 1.63008, - "grad_norm": 44.71598815917969, - "learning_rate": 4.110222222222223e-06, - "loss": 1.6068, - "step": 50940 - }, - { - "epoch": 1.6303999999999998, - "grad_norm": 45.50582504272461, - "learning_rate": 4.1066666666666674e-06, - "loss": 1.5937, - "step": 50950 - }, - { - "epoch": 1.63072, - "grad_norm": 47.319496154785156, - "learning_rate": 4.103111111111112e-06, - "loss": 1.5752, - "step": 50960 - }, - { - "epoch": 1.63104, - "grad_norm": 42.19738006591797, - "learning_rate": 4.099555555555556e-06, - "loss": 1.607, - "step": 50970 - }, - { - "epoch": 1.63136, - "grad_norm": 48.765106201171875, - "learning_rate": 4.096e-06, - "loss": 1.59, - "step": 50980 - }, - { - "epoch": 1.63168, - "grad_norm": 43.94348907470703, - "learning_rate": 4.092444444444445e-06, - "loss": 1.5813, - "step": 50990 - }, - { - "epoch": 1.6320000000000001, - "grad_norm": 46.9215087890625, - "learning_rate": 4.088888888888889e-06, - "loss": 1.6027, - "step": 51000 - }, - { - "epoch": 1.63232, - "grad_norm": 45.82568359375, - "learning_rate": 4.085333333333334e-06, - "loss": 1.5715, - "step": 51010 - }, - { - "epoch": 1.6326399999999999, - "grad_norm": 44.695281982421875, - "learning_rate": 4.081777777777778e-06, - "loss": 1.6268, - "step": 51020 - }, - { - "epoch": 1.63296, - "grad_norm": 47.35905456542969, - "learning_rate": 4.078222222222222e-06, - "loss": 1.5944, - "step": 51030 - }, - { - "epoch": 1.63328, - "grad_norm": 46.44203567504883, - "learning_rate": 4.074666666666667e-06, - "loss": 1.5942, - "step": 51040 - }, - { - "epoch": 1.6336, - "grad_norm": 46.74188995361328, - "learning_rate": 4.071111111111111e-06, - "loss": 1.6059, - "step": 51050 - }, - { - "epoch": 1.63392, - "grad_norm": 
45.445526123046875, - "learning_rate": 4.067555555555556e-06, - "loss": 1.6191, - "step": 51060 - }, - { - "epoch": 1.6342400000000001, - "grad_norm": 43.05592346191406, - "learning_rate": 4.064e-06, - "loss": 1.5822, - "step": 51070 - }, - { - "epoch": 1.63456, - "grad_norm": 47.14398193359375, - "learning_rate": 4.060444444444445e-06, - "loss": 1.571, - "step": 51080 - }, - { - "epoch": 1.6348799999999999, - "grad_norm": 44.45650100708008, - "learning_rate": 4.0568888888888895e-06, - "loss": 1.5836, - "step": 51090 - }, - { - "epoch": 1.6352, - "grad_norm": 46.076725006103516, - "learning_rate": 4.053333333333333e-06, - "loss": 1.5917, - "step": 51100 - }, - { - "epoch": 1.63552, - "grad_norm": 48.551517486572266, - "learning_rate": 4.049777777777778e-06, - "loss": 1.5996, - "step": 51110 - }, - { - "epoch": 1.63584, - "grad_norm": 46.424808502197266, - "learning_rate": 4.046222222222222e-06, - "loss": 1.6065, - "step": 51120 - }, - { - "epoch": 1.6361599999999998, - "grad_norm": 44.893001556396484, - "learning_rate": 4.042666666666667e-06, - "loss": 1.5853, - "step": 51130 - }, - { - "epoch": 1.6364800000000002, - "grad_norm": 45.07635498046875, - "learning_rate": 4.039111111111111e-06, - "loss": 1.5802, - "step": 51140 - }, - { - "epoch": 1.6368, - "grad_norm": 45.24571990966797, - "learning_rate": 4.035555555555556e-06, - "loss": 1.6256, - "step": 51150 - }, - { - "epoch": 1.63712, - "grad_norm": 47.38965606689453, - "learning_rate": 4.0320000000000005e-06, - "loss": 1.6005, - "step": 51160 - }, - { - "epoch": 1.63744, - "grad_norm": 44.441802978515625, - "learning_rate": 4.028444444444445e-06, - "loss": 1.595, - "step": 51170 - }, - { - "epoch": 1.63776, - "grad_norm": 45.896514892578125, - "learning_rate": 4.02488888888889e-06, - "loss": 1.5747, - "step": 51180 - }, - { - "epoch": 1.63808, - "grad_norm": 45.57615280151367, - "learning_rate": 4.021333333333333e-06, - "loss": 1.5843, - "step": 51190 - }, - { - "epoch": 1.6383999999999999, - "grad_norm": 44.5887336730957, - "learning_rate": 4.017777777777778e-06, - "loss": 1.5938, - "step": 51200 - }, - { - "epoch": 1.63872, - "grad_norm": 44.419898986816406, - "learning_rate": 4.0142222222222225e-06, - "loss": 1.5938, - "step": 51210 - }, - { - "epoch": 1.63904, - "grad_norm": 46.093406677246094, - "learning_rate": 4.010666666666667e-06, - "loss": 1.5766, - "step": 51220 - }, - { - "epoch": 1.63936, - "grad_norm": 44.555179595947266, - "learning_rate": 4.0071111111111116e-06, - "loss": 1.6233, - "step": 51230 - }, - { - "epoch": 1.63968, - "grad_norm": 45.38337707519531, - "learning_rate": 4.003555555555556e-06, - "loss": 1.5829, - "step": 51240 - }, - { - "epoch": 1.6400000000000001, - "grad_norm": 47.5709114074707, - "learning_rate": 4.000000000000001e-06, - "loss": 1.5673, - "step": 51250 - }, - { - "epoch": 1.64032, - "grad_norm": 46.36149597167969, - "learning_rate": 3.996444444444445e-06, - "loss": 1.5784, - "step": 51260 - }, - { - "epoch": 1.6406399999999999, - "grad_norm": 43.161376953125, - "learning_rate": 3.99288888888889e-06, - "loss": 1.5826, - "step": 51270 - }, - { - "epoch": 1.64096, - "grad_norm": 44.26531219482422, - "learning_rate": 3.9893333333333335e-06, - "loss": 1.5945, - "step": 51280 - }, - { - "epoch": 1.64128, - "grad_norm": 46.19755172729492, - "learning_rate": 3.985777777777778e-06, - "loss": 1.6204, - "step": 51290 - }, - { - "epoch": 1.6416, - "grad_norm": 45.3082275390625, - "learning_rate": 3.982222222222223e-06, - "loss": 1.6296, - "step": 51300 - }, - { - "epoch": 1.64192, - "grad_norm": 
46.39171600341797, - "learning_rate": 3.978666666666667e-06, - "loss": 1.5551, - "step": 51310 - }, - { - "epoch": 1.6422400000000001, - "grad_norm": 46.921443939208984, - "learning_rate": 3.975111111111111e-06, - "loss": 1.5899, - "step": 51320 - }, - { - "epoch": 1.64256, - "grad_norm": 45.100067138671875, - "learning_rate": 3.971555555555555e-06, - "loss": 1.5889, - "step": 51330 - }, - { - "epoch": 1.64288, - "grad_norm": 46.697940826416016, - "learning_rate": 3.968e-06, - "loss": 1.5864, - "step": 51340 - }, - { - "epoch": 1.6432, - "grad_norm": 45.882144927978516, - "learning_rate": 3.9644444444444445e-06, - "loss": 1.6122, - "step": 51350 - }, - { - "epoch": 1.64352, - "grad_norm": 43.447383880615234, - "learning_rate": 3.960888888888889e-06, - "loss": 1.5687, - "step": 51360 - }, - { - "epoch": 1.64384, - "grad_norm": 44.602203369140625, - "learning_rate": 3.957333333333334e-06, - "loss": 1.5972, - "step": 51370 - }, - { - "epoch": 1.6441599999999998, - "grad_norm": 46.61246109008789, - "learning_rate": 3.953777777777778e-06, - "loss": 1.6087, - "step": 51380 - }, - { - "epoch": 1.6444800000000002, - "grad_norm": 43.60810852050781, - "learning_rate": 3.950222222222223e-06, - "loss": 1.5684, - "step": 51390 - }, - { - "epoch": 1.6448, - "grad_norm": 46.67555236816406, - "learning_rate": 3.946666666666667e-06, - "loss": 1.593, - "step": 51400 - }, - { - "epoch": 1.64512, - "grad_norm": 46.29722213745117, - "learning_rate": 3.943111111111112e-06, - "loss": 1.5815, - "step": 51410 - }, - { - "epoch": 1.64544, - "grad_norm": 45.6417121887207, - "learning_rate": 3.9395555555555555e-06, - "loss": 1.5646, - "step": 51420 - }, - { - "epoch": 1.6457600000000001, - "grad_norm": 45.40904235839844, - "learning_rate": 3.936e-06, - "loss": 1.5829, - "step": 51430 - }, - { - "epoch": 1.64608, - "grad_norm": 45.21997833251953, - "learning_rate": 3.932444444444445e-06, - "loss": 1.5967, - "step": 51440 - }, - { - "epoch": 1.6463999999999999, - "grad_norm": 44.75946044921875, - "learning_rate": 3.928888888888889e-06, - "loss": 1.5784, - "step": 51450 - }, - { - "epoch": 1.64672, - "grad_norm": 45.29618835449219, - "learning_rate": 3.925333333333334e-06, - "loss": 1.5942, - "step": 51460 - }, - { - "epoch": 1.64704, - "grad_norm": 45.929935455322266, - "learning_rate": 3.921777777777778e-06, - "loss": 1.6064, - "step": 51470 - }, - { - "epoch": 1.64736, - "grad_norm": 45.438934326171875, - "learning_rate": 3.918222222222223e-06, - "loss": 1.5798, - "step": 51480 - }, - { - "epoch": 1.64768, - "grad_norm": 47.577274322509766, - "learning_rate": 3.914666666666667e-06, - "loss": 1.6149, - "step": 51490 - }, - { - "epoch": 1.6480000000000001, - "grad_norm": 45.842472076416016, - "learning_rate": 3.911111111111112e-06, - "loss": 1.6094, - "step": 51500 - }, - { - "epoch": 1.64832, - "grad_norm": 45.71033477783203, - "learning_rate": 3.9075555555555565e-06, - "loss": 1.5777, - "step": 51510 - }, - { - "epoch": 1.6486399999999999, - "grad_norm": 47.43943405151367, - "learning_rate": 3.904e-06, - "loss": 1.605, - "step": 51520 - }, - { - "epoch": 1.64896, - "grad_norm": 45.22209167480469, - "learning_rate": 3.900444444444445e-06, - "loss": 1.6268, - "step": 51530 - }, - { - "epoch": 1.64928, - "grad_norm": 47.26148223876953, - "learning_rate": 3.896888888888889e-06, - "loss": 1.5841, - "step": 51540 - }, - { - "epoch": 1.6496, - "grad_norm": 45.25746536254883, - "learning_rate": 3.893333333333333e-06, - "loss": 1.584, - "step": 51550 - }, - { - "epoch": 1.64992, - "grad_norm": 45.87767791748047, - 
"learning_rate": 3.889777777777778e-06, - "loss": 1.5847, - "step": 51560 - }, - { - "epoch": 1.6502400000000002, - "grad_norm": 44.278568267822266, - "learning_rate": 3.886222222222222e-06, - "loss": 1.6103, - "step": 51570 - }, - { - "epoch": 1.65056, - "grad_norm": 45.72529983520508, - "learning_rate": 3.882666666666667e-06, - "loss": 1.6127, - "step": 51580 - }, - { - "epoch": 1.65088, - "grad_norm": 44.123146057128906, - "learning_rate": 3.879111111111111e-06, - "loss": 1.5901, - "step": 51590 - }, - { - "epoch": 1.6512, - "grad_norm": 45.891361236572266, - "learning_rate": 3.875555555555556e-06, - "loss": 1.5728, - "step": 51600 - }, - { - "epoch": 1.65152, - "grad_norm": 46.983604431152344, - "learning_rate": 3.872e-06, - "loss": 1.6013, - "step": 51610 - }, - { - "epoch": 1.65184, - "grad_norm": 45.0738525390625, - "learning_rate": 3.868444444444445e-06, - "loss": 1.6019, - "step": 51620 - }, - { - "epoch": 1.6521599999999999, - "grad_norm": 45.97278594970703, - "learning_rate": 3.8648888888888895e-06, - "loss": 1.5751, - "step": 51630 - }, - { - "epoch": 1.65248, - "grad_norm": 44.256500244140625, - "learning_rate": 3.861333333333333e-06, - "loss": 1.5589, - "step": 51640 - }, - { - "epoch": 1.6528, - "grad_norm": 45.32353973388672, - "learning_rate": 3.857777777777778e-06, - "loss": 1.5763, - "step": 51650 - }, - { - "epoch": 1.65312, - "grad_norm": 45.695960998535156, - "learning_rate": 3.854222222222222e-06, - "loss": 1.567, - "step": 51660 - }, - { - "epoch": 1.65344, - "grad_norm": 43.78841781616211, - "learning_rate": 3.850666666666667e-06, - "loss": 1.5825, - "step": 51670 - }, - { - "epoch": 1.6537600000000001, - "grad_norm": 45.8950080871582, - "learning_rate": 3.847111111111111e-06, - "loss": 1.6075, - "step": 51680 - }, - { - "epoch": 1.65408, - "grad_norm": 46.352874755859375, - "learning_rate": 3.843555555555556e-06, - "loss": 1.62, - "step": 51690 - }, - { - "epoch": 1.6543999999999999, - "grad_norm": 43.862728118896484, - "learning_rate": 3.8400000000000005e-06, - "loss": 1.6047, - "step": 51700 - }, - { - "epoch": 1.65472, - "grad_norm": 46.20279312133789, - "learning_rate": 3.836444444444445e-06, - "loss": 1.5958, - "step": 51710 - }, - { - "epoch": 1.65504, - "grad_norm": 47.41118621826172, - "learning_rate": 3.83288888888889e-06, - "loss": 1.6116, - "step": 51720 - }, - { - "epoch": 1.65536, - "grad_norm": 44.2399787902832, - "learning_rate": 3.829333333333334e-06, - "loss": 1.6, - "step": 51730 - }, - { - "epoch": 1.65568, - "grad_norm": 45.41532897949219, - "learning_rate": 3.825777777777778e-06, - "loss": 1.5894, - "step": 51740 - }, - { - "epoch": 1.6560000000000001, - "grad_norm": 49.13901901245117, - "learning_rate": 3.8222222222222224e-06, - "loss": 1.5931, - "step": 51750 - }, - { - "epoch": 1.65632, - "grad_norm": 46.375240325927734, - "learning_rate": 3.818666666666667e-06, - "loss": 1.5792, - "step": 51760 - }, - { - "epoch": 1.65664, - "grad_norm": 43.16218185424805, - "learning_rate": 3.8151111111111115e-06, - "loss": 1.6248, - "step": 51770 - }, - { - "epoch": 1.65696, - "grad_norm": 47.00339889526367, - "learning_rate": 3.811555555555556e-06, - "loss": 1.5988, - "step": 51780 - }, - { - "epoch": 1.65728, - "grad_norm": 43.82436752319336, - "learning_rate": 3.8080000000000006e-06, - "loss": 1.592, - "step": 51790 - }, - { - "epoch": 1.6576, - "grad_norm": 47.84590530395508, - "learning_rate": 3.8044444444444443e-06, - "loss": 1.5862, - "step": 51800 - }, - { - "epoch": 1.6579199999999998, - "grad_norm": 45.108680725097656, - "learning_rate": 
3.800888888888889e-06, - "loss": 1.5799, - "step": 51810 - }, - { - "epoch": 1.6582400000000002, - "grad_norm": 45.71805953979492, - "learning_rate": 3.7973333333333335e-06, - "loss": 1.6113, - "step": 51820 - }, - { - "epoch": 1.65856, - "grad_norm": 46.040164947509766, - "learning_rate": 3.793777777777778e-06, - "loss": 1.5847, - "step": 51830 - }, - { - "epoch": 1.65888, - "grad_norm": 45.32584762573242, - "learning_rate": 3.7902222222222226e-06, - "loss": 1.5995, - "step": 51840 - }, - { - "epoch": 1.6592, - "grad_norm": 44.63491439819336, - "learning_rate": 3.7866666666666667e-06, - "loss": 1.6002, - "step": 51850 - }, - { - "epoch": 1.65952, - "grad_norm": 46.6467399597168, - "learning_rate": 3.7831111111111112e-06, - "loss": 1.599, - "step": 51860 - }, - { - "epoch": 1.65984, - "grad_norm": 44.993499755859375, - "learning_rate": 3.779555555555556e-06, - "loss": 1.6072, - "step": 51870 - }, - { - "epoch": 1.6601599999999999, - "grad_norm": 45.52958679199219, - "learning_rate": 3.7760000000000004e-06, - "loss": 1.5949, - "step": 51880 - }, - { - "epoch": 1.66048, - "grad_norm": 45.84259033203125, - "learning_rate": 3.7724444444444445e-06, - "loss": 1.5956, - "step": 51890 - }, - { - "epoch": 1.6608, - "grad_norm": 46.66897964477539, - "learning_rate": 3.768888888888889e-06, - "loss": 1.6052, - "step": 51900 - }, - { - "epoch": 1.66112, - "grad_norm": 45.33606719970703, - "learning_rate": 3.7653333333333336e-06, - "loss": 1.6023, - "step": 51910 - }, - { - "epoch": 1.66144, - "grad_norm": 45.42198944091797, - "learning_rate": 3.761777777777778e-06, - "loss": 1.5922, - "step": 51920 - }, - { - "epoch": 1.6617600000000001, - "grad_norm": 46.46091842651367, - "learning_rate": 3.7582222222222227e-06, - "loss": 1.6021, - "step": 51930 - }, - { - "epoch": 1.66208, - "grad_norm": 44.122169494628906, - "learning_rate": 3.754666666666667e-06, - "loss": 1.6073, - "step": 51940 - }, - { - "epoch": 1.6623999999999999, - "grad_norm": 44.32771682739258, - "learning_rate": 3.7511111111111114e-06, - "loss": 1.5977, - "step": 51950 - }, - { - "epoch": 1.66272, - "grad_norm": 44.800567626953125, - "learning_rate": 3.747555555555556e-06, - "loss": 1.5832, - "step": 51960 - }, - { - "epoch": 1.66304, - "grad_norm": 45.98767852783203, - "learning_rate": 3.7440000000000005e-06, - "loss": 1.5969, - "step": 51970 - }, - { - "epoch": 1.66336, - "grad_norm": 45.25199890136719, - "learning_rate": 3.740444444444445e-06, - "loss": 1.5853, - "step": 51980 - }, - { - "epoch": 1.66368, - "grad_norm": 44.7115478515625, - "learning_rate": 3.736888888888889e-06, - "loss": 1.5913, - "step": 51990 - }, - { - "epoch": 1.6640000000000001, - "grad_norm": 46.41364288330078, - "learning_rate": 3.7333333333333337e-06, - "loss": 1.5784, - "step": 52000 - }, - { - "epoch": 1.66432, - "grad_norm": 45.47352981567383, - "learning_rate": 3.7297777777777783e-06, - "loss": 1.5906, - "step": 52010 - }, - { - "epoch": 1.66464, - "grad_norm": 44.713462829589844, - "learning_rate": 3.726222222222223e-06, - "loss": 1.5641, - "step": 52020 - }, - { - "epoch": 1.66496, - "grad_norm": 43.92695999145508, - "learning_rate": 3.7226666666666674e-06, - "loss": 1.5806, - "step": 52030 - }, - { - "epoch": 1.66528, - "grad_norm": 44.497310638427734, - "learning_rate": 3.7191111111111115e-06, - "loss": 1.593, - "step": 52040 - }, - { - "epoch": 1.6656, - "grad_norm": 44.12403106689453, - "learning_rate": 3.7155555555555557e-06, - "loss": 1.5966, - "step": 52050 - }, - { - "epoch": 1.6659199999999998, - "grad_norm": 46.343746185302734, - 
"learning_rate": 3.712e-06, - "loss": 1.5958, - "step": 52060 - }, - { - "epoch": 1.6662400000000002, - "grad_norm": 44.707275390625, - "learning_rate": 3.7084444444444443e-06, - "loss": 1.5921, - "step": 52070 - }, - { - "epoch": 1.66656, - "grad_norm": 44.39040756225586, - "learning_rate": 3.704888888888889e-06, - "loss": 1.5704, - "step": 52080 - }, - { - "epoch": 1.66688, - "grad_norm": 45.53627395629883, - "learning_rate": 3.7013333333333334e-06, - "loss": 1.5879, - "step": 52090 - }, - { - "epoch": 1.6672, - "grad_norm": 46.279083251953125, - "learning_rate": 3.697777777777778e-06, - "loss": 1.6016, - "step": 52100 - }, - { - "epoch": 1.6675200000000001, - "grad_norm": 44.5371208190918, - "learning_rate": 3.6942222222222226e-06, - "loss": 1.5677, - "step": 52110 - }, - { - "epoch": 1.66784, - "grad_norm": 42.929622650146484, - "learning_rate": 3.6906666666666667e-06, - "loss": 1.5781, - "step": 52120 - }, - { - "epoch": 1.6681599999999999, - "grad_norm": 44.74525833129883, - "learning_rate": 3.6871111111111112e-06, - "loss": 1.5949, - "step": 52130 - }, - { - "epoch": 1.66848, - "grad_norm": 43.849388122558594, - "learning_rate": 3.6835555555555558e-06, - "loss": 1.5873, - "step": 52140 - }, - { - "epoch": 1.6688, - "grad_norm": 45.275665283203125, - "learning_rate": 3.6800000000000003e-06, - "loss": 1.5767, - "step": 52150 - }, - { - "epoch": 1.66912, - "grad_norm": 45.785057067871094, - "learning_rate": 3.676444444444445e-06, - "loss": 1.5831, - "step": 52160 - }, - { - "epoch": 1.66944, - "grad_norm": 46.84950256347656, - "learning_rate": 3.672888888888889e-06, - "loss": 1.5949, - "step": 52170 - }, - { - "epoch": 1.6697600000000001, - "grad_norm": 43.389034271240234, - "learning_rate": 3.6693333333333336e-06, - "loss": 1.5767, - "step": 52180 - }, - { - "epoch": 1.67008, - "grad_norm": 43.85615921020508, - "learning_rate": 3.665777777777778e-06, - "loss": 1.601, - "step": 52190 - }, - { - "epoch": 1.6703999999999999, - "grad_norm": 44.621917724609375, - "learning_rate": 3.6622222222222227e-06, - "loss": 1.5791, - "step": 52200 - }, - { - "epoch": 1.67072, - "grad_norm": 45.9107551574707, - "learning_rate": 3.6586666666666672e-06, - "loss": 1.5946, - "step": 52210 - }, - { - "epoch": 1.67104, - "grad_norm": 46.91019058227539, - "learning_rate": 3.6551111111111114e-06, - "loss": 1.5811, - "step": 52220 - }, - { - "epoch": 1.67136, - "grad_norm": 43.92897033691406, - "learning_rate": 3.651555555555556e-06, - "loss": 1.5966, - "step": 52230 - }, - { - "epoch": 1.67168, - "grad_norm": 48.71943283081055, - "learning_rate": 3.6480000000000005e-06, - "loss": 1.591, - "step": 52240 - }, - { - "epoch": 1.6720000000000002, - "grad_norm": 44.170555114746094, - "learning_rate": 3.644444444444445e-06, - "loss": 1.5861, - "step": 52250 - }, - { - "epoch": 1.67232, - "grad_norm": 44.75503158569336, - "learning_rate": 3.640888888888889e-06, - "loss": 1.5603, - "step": 52260 - }, - { - "epoch": 1.67264, - "grad_norm": 45.242271423339844, - "learning_rate": 3.6373333333333337e-06, - "loss": 1.585, - "step": 52270 - }, - { - "epoch": 1.67296, - "grad_norm": 45.12602233886719, - "learning_rate": 3.6337777777777783e-06, - "loss": 1.6057, - "step": 52280 - }, - { - "epoch": 1.67328, - "grad_norm": 45.389461517333984, - "learning_rate": 3.630222222222223e-06, - "loss": 1.5982, - "step": 52290 - }, - { - "epoch": 1.6736, - "grad_norm": 44.52558898925781, - "learning_rate": 3.6266666666666674e-06, - "loss": 1.5794, - "step": 52300 - }, - { - "epoch": 1.6739199999999999, - "grad_norm": 44.75640869140625, - 
"learning_rate": 3.623111111111111e-06, - "loss": 1.5652, - "step": 52310 - }, - { - "epoch": 1.67424, - "grad_norm": 46.04345703125, - "learning_rate": 3.6195555555555556e-06, - "loss": 1.5881, - "step": 52320 - }, - { - "epoch": 1.67456, - "grad_norm": 44.973365783691406, - "learning_rate": 3.616e-06, - "loss": 1.5792, - "step": 52330 - }, - { - "epoch": 1.67488, - "grad_norm": 47.327857971191406, - "learning_rate": 3.6124444444444443e-06, - "loss": 1.586, - "step": 52340 - }, - { - "epoch": 1.6752, - "grad_norm": 44.85529708862305, - "learning_rate": 3.608888888888889e-06, - "loss": 1.5715, - "step": 52350 - }, - { - "epoch": 1.6755200000000001, - "grad_norm": 44.864768981933594, - "learning_rate": 3.6053333333333334e-06, - "loss": 1.6091, - "step": 52360 - }, - { - "epoch": 1.67584, - "grad_norm": 46.082679748535156, - "learning_rate": 3.601777777777778e-06, - "loss": 1.5772, - "step": 52370 - }, - { - "epoch": 1.6761599999999999, - "grad_norm": 45.27592086791992, - "learning_rate": 3.5982222222222225e-06, - "loss": 1.6049, - "step": 52380 - }, - { - "epoch": 1.67648, - "grad_norm": 45.963401794433594, - "learning_rate": 3.5946666666666667e-06, - "loss": 1.6017, - "step": 52390 - }, - { - "epoch": 1.6768, - "grad_norm": 46.299800872802734, - "learning_rate": 3.5911111111111112e-06, - "loss": 1.5578, - "step": 52400 - }, - { - "epoch": 1.67712, - "grad_norm": 46.58485794067383, - "learning_rate": 3.5875555555555558e-06, - "loss": 1.5812, - "step": 52410 - }, - { - "epoch": 1.67744, - "grad_norm": 44.165306091308594, - "learning_rate": 3.5840000000000003e-06, - "loss": 1.5728, - "step": 52420 - }, - { - "epoch": 1.6777600000000001, - "grad_norm": 45.76871109008789, - "learning_rate": 3.580444444444445e-06, - "loss": 1.5943, - "step": 52430 - }, - { - "epoch": 1.67808, - "grad_norm": 46.34144592285156, - "learning_rate": 3.576888888888889e-06, - "loss": 1.6141, - "step": 52440 - }, - { - "epoch": 1.6784, - "grad_norm": 44.653202056884766, - "learning_rate": 3.5733333333333336e-06, - "loss": 1.5804, - "step": 52450 - }, - { - "epoch": 1.67872, - "grad_norm": 44.92979431152344, - "learning_rate": 3.569777777777778e-06, - "loss": 1.6015, - "step": 52460 - }, - { - "epoch": 1.67904, - "grad_norm": 47.18275451660156, - "learning_rate": 3.5662222222222227e-06, - "loss": 1.6068, - "step": 52470 - }, - { - "epoch": 1.67936, - "grad_norm": 44.27334976196289, - "learning_rate": 3.5626666666666672e-06, - "loss": 1.5736, - "step": 52480 - }, - { - "epoch": 1.6796799999999998, - "grad_norm": 44.15816116333008, - "learning_rate": 3.5591111111111114e-06, - "loss": 1.6013, - "step": 52490 - }, - { - "epoch": 1.6800000000000002, - "grad_norm": 46.573646545410156, - "learning_rate": 3.555555555555556e-06, - "loss": 1.6026, - "step": 52500 - }, - { - "epoch": 1.68032, - "grad_norm": 46.70231628417969, - "learning_rate": 3.5520000000000005e-06, - "loss": 1.6064, - "step": 52510 - }, - { - "epoch": 1.68064, - "grad_norm": 47.547264099121094, - "learning_rate": 3.548444444444445e-06, - "loss": 1.5905, - "step": 52520 - }, - { - "epoch": 1.68096, - "grad_norm": 46.42538070678711, - "learning_rate": 3.5448888888888896e-06, - "loss": 1.5743, - "step": 52530 - }, - { - "epoch": 1.68128, - "grad_norm": 47.20460891723633, - "learning_rate": 3.5413333333333337e-06, - "loss": 1.6122, - "step": 52540 - }, - { - "epoch": 1.6816, - "grad_norm": 46.976173400878906, - "learning_rate": 3.5377777777777783e-06, - "loss": 1.6072, - "step": 52550 - }, - { - "epoch": 1.6819199999999999, - "grad_norm": 45.38555145263672, - 
"learning_rate": 3.534222222222223e-06, - "loss": 1.6041, - "step": 52560 - }, - { - "epoch": 1.68224, - "grad_norm": 44.77622985839844, - "learning_rate": 3.5306666666666665e-06, - "loss": 1.5783, - "step": 52570 - }, - { - "epoch": 1.68256, - "grad_norm": 46.87044906616211, - "learning_rate": 3.527111111111111e-06, - "loss": 1.5894, - "step": 52580 - }, - { - "epoch": 1.68288, - "grad_norm": 42.50729751586914, - "learning_rate": 3.5235555555555556e-06, - "loss": 1.5774, - "step": 52590 - }, - { - "epoch": 1.6832, - "grad_norm": 45.89781951904297, - "learning_rate": 3.52e-06, - "loss": 1.5917, - "step": 52600 - }, - { - "epoch": 1.6835200000000001, - "grad_norm": 45.55253601074219, - "learning_rate": 3.5164444444444447e-06, - "loss": 1.6051, - "step": 52610 - }, - { - "epoch": 1.68384, - "grad_norm": 45.637691497802734, - "learning_rate": 3.512888888888889e-06, - "loss": 1.5723, - "step": 52620 - }, - { - "epoch": 1.6841599999999999, - "grad_norm": 46.54056167602539, - "learning_rate": 3.5093333333333334e-06, - "loss": 1.6018, - "step": 52630 - }, - { - "epoch": 1.68448, - "grad_norm": 45.923065185546875, - "learning_rate": 3.505777777777778e-06, - "loss": 1.5924, - "step": 52640 - }, - { - "epoch": 1.6848, - "grad_norm": 45.01921844482422, - "learning_rate": 3.5022222222222225e-06, - "loss": 1.6038, - "step": 52650 - }, - { - "epoch": 1.68512, - "grad_norm": 46.71082305908203, - "learning_rate": 3.498666666666667e-06, - "loss": 1.6226, - "step": 52660 - }, - { - "epoch": 1.68544, - "grad_norm": 43.84824752807617, - "learning_rate": 3.495111111111111e-06, - "loss": 1.5696, - "step": 52670 - }, - { - "epoch": 1.6857600000000001, - "grad_norm": 47.342376708984375, - "learning_rate": 3.4915555555555558e-06, - "loss": 1.5872, - "step": 52680 - }, - { - "epoch": 1.68608, - "grad_norm": 43.14659118652344, - "learning_rate": 3.4880000000000003e-06, - "loss": 1.5725, - "step": 52690 - }, - { - "epoch": 1.6864, - "grad_norm": 47.94723129272461, - "learning_rate": 3.484444444444445e-06, - "loss": 1.576, - "step": 52700 - }, - { - "epoch": 1.68672, - "grad_norm": 48.267154693603516, - "learning_rate": 3.480888888888889e-06, - "loss": 1.611, - "step": 52710 - }, - { - "epoch": 1.68704, - "grad_norm": 46.32544708251953, - "learning_rate": 3.4773333333333336e-06, - "loss": 1.5865, - "step": 52720 - }, - { - "epoch": 1.68736, - "grad_norm": 46.21999740600586, - "learning_rate": 3.473777777777778e-06, - "loss": 1.6158, - "step": 52730 - }, - { - "epoch": 1.6876799999999998, - "grad_norm": 46.073204040527344, - "learning_rate": 3.4702222222222227e-06, - "loss": 1.6152, - "step": 52740 - }, - { - "epoch": 1.688, - "grad_norm": 47.04712677001953, - "learning_rate": 3.4666666666666672e-06, - "loss": 1.6032, - "step": 52750 - }, - { - "epoch": 1.68832, - "grad_norm": 46.21564483642578, - "learning_rate": 3.4631111111111113e-06, - "loss": 1.589, - "step": 52760 - }, - { - "epoch": 1.68864, - "grad_norm": 44.94911193847656, - "learning_rate": 3.459555555555556e-06, - "loss": 1.5702, - "step": 52770 - }, - { - "epoch": 1.68896, - "grad_norm": 46.054893493652344, - "learning_rate": 3.4560000000000005e-06, - "loss": 1.5829, - "step": 52780 - }, - { - "epoch": 1.6892800000000001, - "grad_norm": 45.82691955566406, - "learning_rate": 3.452444444444445e-06, - "loss": 1.6029, - "step": 52790 - }, - { - "epoch": 1.6896, - "grad_norm": 45.559146881103516, - "learning_rate": 3.4488888888888896e-06, - "loss": 1.5699, - "step": 52800 - }, - { - "epoch": 1.6899199999999999, - "grad_norm": 44.324974060058594, - 
"learning_rate": 3.4453333333333337e-06, - "loss": 1.5822, - "step": 52810 - }, - { - "epoch": 1.69024, - "grad_norm": 49.79753875732422, - "learning_rate": 3.4417777777777782e-06, - "loss": 1.591, - "step": 52820 - }, - { - "epoch": 1.69056, - "grad_norm": 46.32487487792969, - "learning_rate": 3.4382222222222224e-06, - "loss": 1.5929, - "step": 52830 - }, - { - "epoch": 1.69088, - "grad_norm": 47.12722396850586, - "learning_rate": 3.4346666666666665e-06, - "loss": 1.5979, - "step": 52840 - }, - { - "epoch": 1.6912, - "grad_norm": 45.32570266723633, - "learning_rate": 3.431111111111111e-06, - "loss": 1.5673, - "step": 52850 - }, - { - "epoch": 1.6915200000000001, - "grad_norm": 46.18455505371094, - "learning_rate": 3.4275555555555556e-06, - "loss": 1.6429, - "step": 52860 - }, - { - "epoch": 1.69184, - "grad_norm": 45.9107780456543, - "learning_rate": 3.424e-06, - "loss": 1.6182, - "step": 52870 - }, - { - "epoch": 1.6921599999999999, - "grad_norm": 45.0168342590332, - "learning_rate": 3.4204444444444447e-06, - "loss": 1.5625, - "step": 52880 - }, - { - "epoch": 1.69248, - "grad_norm": 45.60793685913086, - "learning_rate": 3.416888888888889e-06, - "loss": 1.5905, - "step": 52890 - }, - { - "epoch": 1.6928, - "grad_norm": 45.69491195678711, - "learning_rate": 3.4133333333333334e-06, - "loss": 1.5774, - "step": 52900 - }, - { - "epoch": 1.69312, - "grad_norm": 45.09488296508789, - "learning_rate": 3.409777777777778e-06, - "loss": 1.5902, - "step": 52910 - }, - { - "epoch": 1.6934399999999998, - "grad_norm": 47.69397735595703, - "learning_rate": 3.4062222222222225e-06, - "loss": 1.5888, - "step": 52920 - }, - { - "epoch": 1.6937600000000002, - "grad_norm": 46.744075775146484, - "learning_rate": 3.402666666666667e-06, - "loss": 1.577, - "step": 52930 - }, - { - "epoch": 1.69408, - "grad_norm": 45.290775299072266, - "learning_rate": 3.399111111111111e-06, - "loss": 1.5799, - "step": 52940 - }, - { - "epoch": 1.6944, - "grad_norm": 47.012481689453125, - "learning_rate": 3.3955555555555558e-06, - "loss": 1.6094, - "step": 52950 - }, - { - "epoch": 1.69472, - "grad_norm": 47.2081298828125, - "learning_rate": 3.3920000000000003e-06, - "loss": 1.572, - "step": 52960 - }, - { - "epoch": 1.69504, - "grad_norm": 47.48590850830078, - "learning_rate": 3.388444444444445e-06, - "loss": 1.6143, - "step": 52970 - }, - { - "epoch": 1.69536, - "grad_norm": 47.304962158203125, - "learning_rate": 3.3848888888888894e-06, - "loss": 1.5967, - "step": 52980 - }, - { - "epoch": 1.6956799999999999, - "grad_norm": 46.138160705566406, - "learning_rate": 3.3813333333333335e-06, - "loss": 1.623, - "step": 52990 - }, - { - "epoch": 1.696, - "grad_norm": 45.87535095214844, - "learning_rate": 3.377777777777778e-06, - "loss": 1.5781, - "step": 53000 - }, - { - "epoch": 1.69632, - "grad_norm": 45.48112106323242, - "learning_rate": 3.3742222222222226e-06, - "loss": 1.575, - "step": 53010 - }, - { - "epoch": 1.69664, - "grad_norm": 45.52760314941406, - "learning_rate": 3.370666666666667e-06, - "loss": 1.5674, - "step": 53020 - }, - { - "epoch": 1.69696, - "grad_norm": 45.82063674926758, - "learning_rate": 3.3671111111111118e-06, - "loss": 1.6147, - "step": 53030 - }, - { - "epoch": 1.6972800000000001, - "grad_norm": 46.35969924926758, - "learning_rate": 3.363555555555556e-06, - "loss": 1.5475, - "step": 53040 - }, - { - "epoch": 1.6976, - "grad_norm": 45.41106033325195, - "learning_rate": 3.3600000000000004e-06, - "loss": 1.6013, - "step": 53050 - }, - { - "epoch": 1.6979199999999999, - "grad_norm": 44.992618560791016, - 
"learning_rate": 3.356444444444445e-06, - "loss": 1.5914, - "step": 53060 - }, - { - "epoch": 1.69824, - "grad_norm": 48.927650451660156, - "learning_rate": 3.3528888888888895e-06, - "loss": 1.5979, - "step": 53070 - }, - { - "epoch": 1.69856, - "grad_norm": 45.46303176879883, - "learning_rate": 3.3493333333333337e-06, - "loss": 1.5867, - "step": 53080 - }, - { - "epoch": 1.69888, - "grad_norm": 46.1902961730957, - "learning_rate": 3.345777777777778e-06, - "loss": 1.6074, - "step": 53090 - }, - { - "epoch": 1.6992, - "grad_norm": 44.09128952026367, - "learning_rate": 3.3422222222222224e-06, - "loss": 1.6105, - "step": 53100 - }, - { - "epoch": 1.6995200000000001, - "grad_norm": 44.388973236083984, - "learning_rate": 3.338666666666667e-06, - "loss": 1.5736, - "step": 53110 - }, - { - "epoch": 1.69984, - "grad_norm": 44.886451721191406, - "learning_rate": 3.335111111111111e-06, - "loss": 1.6158, - "step": 53120 - }, - { - "epoch": 1.70016, - "grad_norm": 45.723670959472656, - "learning_rate": 3.3315555555555556e-06, - "loss": 1.599, - "step": 53130 - }, - { - "epoch": 1.70048, - "grad_norm": 47.3483772277832, - "learning_rate": 3.328e-06, - "loss": 1.613, - "step": 53140 - }, - { - "epoch": 1.7008, - "grad_norm": 45.45103073120117, - "learning_rate": 3.3244444444444447e-06, - "loss": 1.5638, - "step": 53150 - }, - { - "epoch": 1.70112, - "grad_norm": 45.69742202758789, - "learning_rate": 3.3208888888888893e-06, - "loss": 1.5924, - "step": 53160 - }, - { - "epoch": 1.7014399999999998, - "grad_norm": 45.6076545715332, - "learning_rate": 3.3173333333333334e-06, - "loss": 1.601, - "step": 53170 - }, - { - "epoch": 1.7017600000000002, - "grad_norm": 44.57272720336914, - "learning_rate": 3.313777777777778e-06, - "loss": 1.6027, - "step": 53180 - }, - { - "epoch": 1.70208, - "grad_norm": 46.239906311035156, - "learning_rate": 3.3102222222222225e-06, - "loss": 1.5891, - "step": 53190 - }, - { - "epoch": 1.7024, - "grad_norm": 45.24344253540039, - "learning_rate": 3.306666666666667e-06, - "loss": 1.569, - "step": 53200 - }, - { - "epoch": 1.70272, - "grad_norm": 44.35649871826172, - "learning_rate": 3.303111111111111e-06, - "loss": 1.5861, - "step": 53210 - }, - { - "epoch": 1.70304, - "grad_norm": 46.33483123779297, - "learning_rate": 3.2995555555555557e-06, - "loss": 1.5841, - "step": 53220 - }, - { - "epoch": 1.70336, - "grad_norm": 45.243778228759766, - "learning_rate": 3.2960000000000003e-06, - "loss": 1.5776, - "step": 53230 - }, - { - "epoch": 1.7036799999999999, - "grad_norm": 48.42351531982422, - "learning_rate": 3.292444444444445e-06, - "loss": 1.5709, - "step": 53240 - }, - { - "epoch": 1.704, - "grad_norm": 46.71955871582031, - "learning_rate": 3.2888888888888894e-06, - "loss": 1.6112, - "step": 53250 - }, - { - "epoch": 1.70432, - "grad_norm": 43.32274627685547, - "learning_rate": 3.2853333333333335e-06, - "loss": 1.553, - "step": 53260 - }, - { - "epoch": 1.70464, - "grad_norm": 47.185638427734375, - "learning_rate": 3.281777777777778e-06, - "loss": 1.6212, - "step": 53270 - }, - { - "epoch": 1.70496, - "grad_norm": 45.04874038696289, - "learning_rate": 3.2782222222222226e-06, - "loss": 1.5856, - "step": 53280 - }, - { - "epoch": 1.7052800000000001, - "grad_norm": 48.05552291870117, - "learning_rate": 3.274666666666667e-06, - "loss": 1.6068, - "step": 53290 - }, - { - "epoch": 1.7056, - "grad_norm": 45.691795349121094, - "learning_rate": 3.2711111111111117e-06, - "loss": 1.5867, - "step": 53300 - }, - { - "epoch": 1.7059199999999999, - "grad_norm": 46.98811721801758, - "learning_rate": 
3.267555555555556e-06, - "loss": 1.6024, - "step": 53310 - }, - { - "epoch": 1.70624, - "grad_norm": 46.40603256225586, - "learning_rate": 3.2640000000000004e-06, - "loss": 1.5938, - "step": 53320 - }, - { - "epoch": 1.70656, - "grad_norm": 46.9987678527832, - "learning_rate": 3.260444444444445e-06, - "loss": 1.5696, - "step": 53330 - }, - { - "epoch": 1.70688, - "grad_norm": 45.82811737060547, - "learning_rate": 3.2568888888888895e-06, - "loss": 1.5863, - "step": 53340 - }, - { - "epoch": 1.7072, - "grad_norm": 44.975677490234375, - "learning_rate": 3.2533333333333332e-06, - "loss": 1.5977, - "step": 53350 - }, - { - "epoch": 1.7075200000000001, - "grad_norm": 45.661842346191406, - "learning_rate": 3.249777777777778e-06, - "loss": 1.5828, - "step": 53360 - }, - { - "epoch": 1.70784, - "grad_norm": 47.03694534301758, - "learning_rate": 3.2462222222222224e-06, - "loss": 1.6006, - "step": 53370 - }, - { - "epoch": 1.70816, - "grad_norm": 48.213199615478516, - "learning_rate": 3.242666666666667e-06, - "loss": 1.6157, - "step": 53380 - }, - { - "epoch": 1.70848, - "grad_norm": 48.341346740722656, - "learning_rate": 3.239111111111111e-06, - "loss": 1.6048, - "step": 53390 - }, - { - "epoch": 1.7088, - "grad_norm": 44.660884857177734, - "learning_rate": 3.2355555555555556e-06, - "loss": 1.5845, - "step": 53400 - }, - { - "epoch": 1.70912, - "grad_norm": 47.39234924316406, - "learning_rate": 3.232e-06, - "loss": 1.5914, - "step": 53410 - }, - { - "epoch": 1.7094399999999998, - "grad_norm": 45.54050064086914, - "learning_rate": 3.2284444444444447e-06, - "loss": 1.5943, - "step": 53420 - }, - { - "epoch": 1.70976, - "grad_norm": 46.718017578125, - "learning_rate": 3.2248888888888892e-06, - "loss": 1.5672, - "step": 53430 - }, - { - "epoch": 1.71008, - "grad_norm": 45.91179656982422, - "learning_rate": 3.2213333333333334e-06, - "loss": 1.5901, - "step": 53440 - }, - { - "epoch": 1.7104, - "grad_norm": 46.24087905883789, - "learning_rate": 3.217777777777778e-06, - "loss": 1.6141, - "step": 53450 - }, - { - "epoch": 1.71072, - "grad_norm": 43.00766372680664, - "learning_rate": 3.2142222222222225e-06, - "loss": 1.6029, - "step": 53460 - }, - { - "epoch": 1.7110400000000001, - "grad_norm": 44.39076232910156, - "learning_rate": 3.210666666666667e-06, - "loss": 1.546, - "step": 53470 - }, - { - "epoch": 1.71136, - "grad_norm": 46.70647048950195, - "learning_rate": 3.2071111111111116e-06, - "loss": 1.5961, - "step": 53480 - }, - { - "epoch": 1.7116799999999999, - "grad_norm": 48.059104919433594, - "learning_rate": 3.2035555555555557e-06, - "loss": 1.6026, - "step": 53490 - }, - { - "epoch": 1.712, - "grad_norm": 44.19499206542969, - "learning_rate": 3.2000000000000003e-06, - "loss": 1.5858, - "step": 53500 - }, - { - "epoch": 1.71232, - "grad_norm": 46.01230239868164, - "learning_rate": 3.196444444444445e-06, - "loss": 1.5919, - "step": 53510 - }, - { - "epoch": 1.71264, - "grad_norm": 45.61040496826172, - "learning_rate": 3.1928888888888894e-06, - "loss": 1.5565, - "step": 53520 - }, - { - "epoch": 1.71296, - "grad_norm": 46.07856369018555, - "learning_rate": 3.1893333333333335e-06, - "loss": 1.5847, - "step": 53530 - }, - { - "epoch": 1.7132800000000001, - "grad_norm": 43.263511657714844, - "learning_rate": 3.185777777777778e-06, - "loss": 1.5748, - "step": 53540 - }, - { - "epoch": 1.7136, - "grad_norm": 47.8712272644043, - "learning_rate": 3.1822222222222226e-06, - "loss": 1.6012, - "step": 53550 - }, - { - "epoch": 1.7139199999999999, - "grad_norm": 46.34992218017578, - "learning_rate": 
3.178666666666667e-06, - "loss": 1.6002, - "step": 53560 - }, - { - "epoch": 1.71424, - "grad_norm": 43.75440979003906, - "learning_rate": 3.1751111111111117e-06, - "loss": 1.5925, - "step": 53570 - }, - { - "epoch": 1.71456, - "grad_norm": 44.676578521728516, - "learning_rate": 3.171555555555556e-06, - "loss": 1.5746, - "step": 53580 - }, - { - "epoch": 1.71488, - "grad_norm": 45.60486602783203, - "learning_rate": 3.1680000000000004e-06, - "loss": 1.5715, - "step": 53590 - }, - { - "epoch": 1.7151999999999998, - "grad_norm": 44.54808044433594, - "learning_rate": 3.1644444444444445e-06, - "loss": 1.5822, - "step": 53600 - }, - { - "epoch": 1.7155200000000002, - "grad_norm": 46.047264099121094, - "learning_rate": 3.160888888888889e-06, - "loss": 1.6015, - "step": 53610 - }, - { - "epoch": 1.71584, - "grad_norm": 45.982940673828125, - "learning_rate": 3.1573333333333332e-06, - "loss": 1.5928, - "step": 53620 - }, - { - "epoch": 1.71616, - "grad_norm": 44.72659683227539, - "learning_rate": 3.1537777777777778e-06, - "loss": 1.5941, - "step": 53630 - }, - { - "epoch": 1.71648, - "grad_norm": 46.68376541137695, - "learning_rate": 3.1502222222222223e-06, - "loss": 1.585, - "step": 53640 - }, - { - "epoch": 1.7168, - "grad_norm": 46.007652282714844, - "learning_rate": 3.146666666666667e-06, - "loss": 1.6095, - "step": 53650 - }, - { - "epoch": 1.71712, - "grad_norm": 48.402584075927734, - "learning_rate": 3.143111111111111e-06, - "loss": 1.5706, - "step": 53660 - }, - { - "epoch": 1.7174399999999999, - "grad_norm": 46.359947204589844, - "learning_rate": 3.1395555555555556e-06, - "loss": 1.5995, - "step": 53670 - }, - { - "epoch": 1.71776, - "grad_norm": 45.970184326171875, - "learning_rate": 3.136e-06, - "loss": 1.5891, - "step": 53680 - }, - { - "epoch": 1.71808, - "grad_norm": 46.32889175415039, - "learning_rate": 3.1324444444444447e-06, - "loss": 1.593, - "step": 53690 - }, - { - "epoch": 1.7184, - "grad_norm": 44.1278190612793, - "learning_rate": 3.1288888888888892e-06, - "loss": 1.6027, - "step": 53700 - }, - { - "epoch": 1.71872, - "grad_norm": 46.170387268066406, - "learning_rate": 3.1253333333333334e-06, - "loss": 1.6016, - "step": 53710 - }, - { - "epoch": 1.7190400000000001, - "grad_norm": 48.95341491699219, - "learning_rate": 3.121777777777778e-06, - "loss": 1.5959, - "step": 53720 - }, - { - "epoch": 1.71936, - "grad_norm": 46.69742965698242, - "learning_rate": 3.1182222222222225e-06, - "loss": 1.58, - "step": 53730 - }, - { - "epoch": 1.7196799999999999, - "grad_norm": 44.949180603027344, - "learning_rate": 3.114666666666667e-06, - "loss": 1.5845, - "step": 53740 - }, - { - "epoch": 1.72, - "grad_norm": 47.0362434387207, - "learning_rate": 3.1111111111111116e-06, - "loss": 1.6146, - "step": 53750 - }, - { - "epoch": 1.72032, - "grad_norm": 47.64339828491211, - "learning_rate": 3.1075555555555557e-06, - "loss": 1.6238, - "step": 53760 - }, - { - "epoch": 1.72064, - "grad_norm": 47.11689758300781, - "learning_rate": 3.1040000000000003e-06, - "loss": 1.5878, - "step": 53770 - }, - { - "epoch": 1.72096, - "grad_norm": 46.65193557739258, - "learning_rate": 3.100444444444445e-06, - "loss": 1.5894, - "step": 53780 - }, - { - "epoch": 1.7212800000000001, - "grad_norm": 47.493778228759766, - "learning_rate": 3.0968888888888894e-06, - "loss": 1.5807, - "step": 53790 - }, - { - "epoch": 1.7216, - "grad_norm": 45.632720947265625, - "learning_rate": 3.093333333333334e-06, - "loss": 1.6096, - "step": 53800 - }, - { - "epoch": 1.72192, - "grad_norm": 46.928646087646484, - "learning_rate": 
3.089777777777778e-06, - "loss": 1.5793, - "step": 53810 - }, - { - "epoch": 1.72224, - "grad_norm": 48.068153381347656, - "learning_rate": 3.0862222222222226e-06, - "loss": 1.6036, - "step": 53820 - }, - { - "epoch": 1.72256, - "grad_norm": 45.10832977294922, - "learning_rate": 3.082666666666667e-06, - "loss": 1.5993, - "step": 53830 - }, - { - "epoch": 1.72288, - "grad_norm": 47.08902359008789, - "learning_rate": 3.0791111111111117e-06, - "loss": 1.5951, - "step": 53840 - }, - { - "epoch": 1.7231999999999998, - "grad_norm": 46.66806411743164, - "learning_rate": 3.0755555555555563e-06, - "loss": 1.5956, - "step": 53850 - }, - { - "epoch": 1.7235200000000002, - "grad_norm": 44.86949157714844, - "learning_rate": 3.072e-06, - "loss": 1.5838, - "step": 53860 - }, - { - "epoch": 1.72384, - "grad_norm": 44.62445831298828, - "learning_rate": 3.0684444444444445e-06, - "loss": 1.6053, - "step": 53870 - }, - { - "epoch": 1.72416, - "grad_norm": 58.58441162109375, - "learning_rate": 3.064888888888889e-06, - "loss": 1.6101, - "step": 53880 - }, - { - "epoch": 1.72448, - "grad_norm": 47.462833404541016, - "learning_rate": 3.0613333333333332e-06, - "loss": 1.5816, - "step": 53890 - }, - { - "epoch": 1.7248, - "grad_norm": 44.10990905761719, - "learning_rate": 3.0577777777777778e-06, - "loss": 1.5876, - "step": 53900 - }, - { - "epoch": 1.72512, - "grad_norm": 46.68988037109375, - "learning_rate": 3.0542222222222223e-06, - "loss": 1.5852, - "step": 53910 - }, - { - "epoch": 1.7254399999999999, - "grad_norm": 45.82401657104492, - "learning_rate": 3.050666666666667e-06, - "loss": 1.5877, - "step": 53920 - }, - { - "epoch": 1.72576, - "grad_norm": 45.19087219238281, - "learning_rate": 3.0471111111111114e-06, - "loss": 1.5625, - "step": 53930 - }, - { - "epoch": 1.72608, - "grad_norm": 47.10851287841797, - "learning_rate": 3.0435555555555556e-06, - "loss": 1.5695, - "step": 53940 - }, - { - "epoch": 1.7264, - "grad_norm": 46.2303581237793, - "learning_rate": 3.04e-06, - "loss": 1.5625, - "step": 53950 - }, - { - "epoch": 1.72672, - "grad_norm": 45.25043487548828, - "learning_rate": 3.0364444444444447e-06, - "loss": 1.5951, - "step": 53960 - }, - { - "epoch": 1.7270400000000001, - "grad_norm": 46.983970642089844, - "learning_rate": 3.0328888888888892e-06, - "loss": 1.5889, - "step": 53970 - }, - { - "epoch": 1.72736, - "grad_norm": 45.9723014831543, - "learning_rate": 3.0293333333333338e-06, - "loss": 1.5972, - "step": 53980 - }, - { - "epoch": 1.7276799999999999, - "grad_norm": 46.250850677490234, - "learning_rate": 3.025777777777778e-06, - "loss": 1.5962, - "step": 53990 - }, - { - "epoch": 1.728, - "grad_norm": 45.149532318115234, - "learning_rate": 3.0222222222222225e-06, - "loss": 1.5752, - "step": 54000 - }, - { - "epoch": 1.72832, - "grad_norm": 44.299720764160156, - "learning_rate": 3.018666666666667e-06, - "loss": 1.5883, - "step": 54010 - }, - { - "epoch": 1.72864, - "grad_norm": 45.48408508300781, - "learning_rate": 3.0151111111111116e-06, - "loss": 1.5829, - "step": 54020 - }, - { - "epoch": 1.72896, - "grad_norm": 45.040367126464844, - "learning_rate": 3.0115555555555557e-06, - "loss": 1.5962, - "step": 54030 - }, - { - "epoch": 1.7292800000000002, - "grad_norm": 46.36260986328125, - "learning_rate": 3.0080000000000003e-06, - "loss": 1.5923, - "step": 54040 - }, - { - "epoch": 1.7296, - "grad_norm": 44.90981674194336, - "learning_rate": 3.004444444444445e-06, - "loss": 1.6213, - "step": 54050 - }, - { - "epoch": 1.72992, - "grad_norm": 47.245059967041016, - "learning_rate": 
3.0008888888888894e-06, - "loss": 1.578, - "step": 54060 - }, - { - "epoch": 1.73024, - "grad_norm": 44.638729095458984, - "learning_rate": 2.997333333333334e-06, - "loss": 1.5771, - "step": 54070 - }, - { - "epoch": 1.73056, - "grad_norm": 45.962371826171875, - "learning_rate": 2.993777777777778e-06, - "loss": 1.5925, - "step": 54080 - }, - { - "epoch": 1.73088, - "grad_norm": 47.59477996826172, - "learning_rate": 2.9902222222222226e-06, - "loss": 1.6139, - "step": 54090 - }, - { - "epoch": 1.7311999999999999, - "grad_norm": 44.2559928894043, - "learning_rate": 2.986666666666667e-06, - "loss": 1.5937, - "step": 54100 - }, - { - "epoch": 1.73152, - "grad_norm": 45.09414291381836, - "learning_rate": 2.9831111111111117e-06, - "loss": 1.5887, - "step": 54110 - }, - { - "epoch": 1.73184, - "grad_norm": 45.167659759521484, - "learning_rate": 2.9795555555555554e-06, - "loss": 1.5933, - "step": 54120 - }, - { - "epoch": 1.73216, - "grad_norm": 46.760459899902344, - "learning_rate": 2.976e-06, - "loss": 1.5563, - "step": 54130 - }, - { - "epoch": 1.73248, - "grad_norm": 46.089717864990234, - "learning_rate": 2.9724444444444445e-06, - "loss": 1.6039, - "step": 54140 - }, - { - "epoch": 1.7328000000000001, - "grad_norm": 44.89067077636719, - "learning_rate": 2.968888888888889e-06, - "loss": 1.598, - "step": 54150 - }, - { - "epoch": 1.73312, - "grad_norm": 46.289737701416016, - "learning_rate": 2.965333333333333e-06, - "loss": 1.5787, - "step": 54160 - }, - { - "epoch": 1.7334399999999999, - "grad_norm": 54.458438873291016, - "learning_rate": 2.9617777777777778e-06, - "loss": 1.6064, - "step": 54170 - }, - { - "epoch": 1.73376, - "grad_norm": 46.293182373046875, - "learning_rate": 2.9582222222222223e-06, - "loss": 1.5778, - "step": 54180 - }, - { - "epoch": 1.73408, - "grad_norm": 44.743141174316406, - "learning_rate": 2.954666666666667e-06, - "loss": 1.5935, - "step": 54190 - }, - { - "epoch": 1.7344, - "grad_norm": 45.82085418701172, - "learning_rate": 2.9511111111111114e-06, - "loss": 1.565, - "step": 54200 - }, - { - "epoch": 1.73472, - "grad_norm": 45.540897369384766, - "learning_rate": 2.9475555555555556e-06, - "loss": 1.575, - "step": 54210 - }, - { - "epoch": 1.7350400000000001, - "grad_norm": 43.974327087402344, - "learning_rate": 2.944e-06, - "loss": 1.5982, - "step": 54220 - }, - { - "epoch": 1.73536, - "grad_norm": 45.236446380615234, - "learning_rate": 2.9404444444444447e-06, - "loss": 1.5448, - "step": 54230 - }, - { - "epoch": 1.73568, - "grad_norm": 45.35393142700195, - "learning_rate": 2.936888888888889e-06, - "loss": 1.5719, - "step": 54240 - }, - { - "epoch": 1.736, - "grad_norm": 45.527549743652344, - "learning_rate": 2.9333333333333338e-06, - "loss": 1.6116, - "step": 54250 - }, - { - "epoch": 1.73632, - "grad_norm": 46.48612594604492, - "learning_rate": 2.929777777777778e-06, - "loss": 1.5727, - "step": 54260 - }, - { - "epoch": 1.73664, - "grad_norm": 46.852294921875, - "learning_rate": 2.9262222222222224e-06, - "loss": 1.6184, - "step": 54270 - }, - { - "epoch": 1.7369599999999998, - "grad_norm": 45.39048767089844, - "learning_rate": 2.922666666666667e-06, - "loss": 1.5797, - "step": 54280 - }, - { - "epoch": 1.7372800000000002, - "grad_norm": 46.60129928588867, - "learning_rate": 2.9191111111111116e-06, - "loss": 1.615, - "step": 54290 - }, - { - "epoch": 1.7376, - "grad_norm": 45.626441955566406, - "learning_rate": 2.915555555555556e-06, - "loss": 1.5825, - "step": 54300 - }, - { - "epoch": 1.73792, - "grad_norm": 48.38727569580078, - "learning_rate": 
2.9120000000000002e-06, - "loss": 1.6134, - "step": 54310 - }, - { - "epoch": 1.73824, - "grad_norm": 45.35850143432617, - "learning_rate": 2.908444444444445e-06, - "loss": 1.5666, - "step": 54320 - }, - { - "epoch": 1.73856, - "grad_norm": 45.78057861328125, - "learning_rate": 2.9048888888888893e-06, - "loss": 1.5809, - "step": 54330 - }, - { - "epoch": 1.73888, - "grad_norm": 45.44707489013672, - "learning_rate": 2.901333333333334e-06, - "loss": 1.5908, - "step": 54340 - }, - { - "epoch": 1.7391999999999999, - "grad_norm": 44.71982955932617, - "learning_rate": 2.8977777777777785e-06, - "loss": 1.5843, - "step": 54350 - }, - { - "epoch": 1.73952, - "grad_norm": 45.987815856933594, - "learning_rate": 2.8942222222222226e-06, - "loss": 1.585, - "step": 54360 - }, - { - "epoch": 1.73984, - "grad_norm": 47.58973693847656, - "learning_rate": 2.890666666666667e-06, - "loss": 1.5962, - "step": 54370 - }, - { - "epoch": 1.74016, - "grad_norm": 47.01103973388672, - "learning_rate": 2.8871111111111113e-06, - "loss": 1.5784, - "step": 54380 - }, - { - "epoch": 1.74048, - "grad_norm": 45.42546463012695, - "learning_rate": 2.8835555555555554e-06, - "loss": 1.6064, - "step": 54390 - }, - { - "epoch": 1.7408000000000001, - "grad_norm": 45.88031768798828, - "learning_rate": 2.88e-06, - "loss": 1.5828, - "step": 54400 - }, - { - "epoch": 1.74112, - "grad_norm": 43.90048599243164, - "learning_rate": 2.8764444444444445e-06, - "loss": 1.5748, - "step": 54410 - }, - { - "epoch": 1.7414399999999999, - "grad_norm": 45.916751861572266, - "learning_rate": 2.872888888888889e-06, - "loss": 1.6149, - "step": 54420 - }, - { - "epoch": 1.74176, - "grad_norm": 44.58837890625, - "learning_rate": 2.8693333333333336e-06, - "loss": 1.6083, - "step": 54430 - }, - { - "epoch": 1.74208, - "grad_norm": 45.08499526977539, - "learning_rate": 2.8657777777777777e-06, - "loss": 1.5823, - "step": 54440 - }, - { - "epoch": 1.7424, - "grad_norm": 45.388545989990234, - "learning_rate": 2.8622222222222223e-06, - "loss": 1.5837, - "step": 54450 - }, - { - "epoch": 1.74272, - "grad_norm": 45.021602630615234, - "learning_rate": 2.858666666666667e-06, - "loss": 1.5933, - "step": 54460 - }, - { - "epoch": 1.7430400000000001, - "grad_norm": 45.340152740478516, - "learning_rate": 2.8551111111111114e-06, - "loss": 1.6089, - "step": 54470 - }, - { - "epoch": 1.74336, - "grad_norm": 48.71046829223633, - "learning_rate": 2.8515555555555555e-06, - "loss": 1.5735, - "step": 54480 - }, - { - "epoch": 1.74368, - "grad_norm": 46.213844299316406, - "learning_rate": 2.848e-06, - "loss": 1.5872, - "step": 54490 - }, - { - "epoch": 1.744, - "grad_norm": 44.88610076904297, - "learning_rate": 2.8444444444444446e-06, - "loss": 1.5619, - "step": 54500 - }, - { - "epoch": 1.74432, - "grad_norm": 48.63814163208008, - "learning_rate": 2.840888888888889e-06, - "loss": 1.5965, - "step": 54510 - }, - { - "epoch": 1.74464, - "grad_norm": 45.273155212402344, - "learning_rate": 2.8373333333333338e-06, - "loss": 1.587, - "step": 54520 - }, - { - "epoch": 1.7449599999999998, - "grad_norm": 47.4951057434082, - "learning_rate": 2.833777777777778e-06, - "loss": 1.5887, - "step": 54530 - }, - { - "epoch": 1.7452800000000002, - "grad_norm": 46.31680679321289, - "learning_rate": 2.8302222222222224e-06, - "loss": 1.6039, - "step": 54540 - }, - { - "epoch": 1.7456, - "grad_norm": 45.749420166015625, - "learning_rate": 2.826666666666667e-06, - "loss": 1.5674, - "step": 54550 - }, - { - "epoch": 1.74592, - "grad_norm": 47.789306640625, - "learning_rate": 2.8231111111111115e-06, - 
"loss": 1.5951, - "step": 54560 - }, - { - "epoch": 1.74624, - "grad_norm": 48.388572692871094, - "learning_rate": 2.819555555555556e-06, - "loss": 1.605, - "step": 54570 - }, - { - "epoch": 1.7465600000000001, - "grad_norm": 46.346153259277344, - "learning_rate": 2.8160000000000002e-06, - "loss": 1.5703, - "step": 54580 - }, - { - "epoch": 1.74688, - "grad_norm": 44.61640548706055, - "learning_rate": 2.8124444444444448e-06, - "loss": 1.6008, - "step": 54590 - }, - { - "epoch": 1.7471999999999999, - "grad_norm": 45.16740417480469, - "learning_rate": 2.8088888888888893e-06, - "loss": 1.5427, - "step": 54600 - }, - { - "epoch": 1.74752, - "grad_norm": 47.40975570678711, - "learning_rate": 2.805333333333334e-06, - "loss": 1.5796, - "step": 54610 - }, - { - "epoch": 1.74784, - "grad_norm": 44.373897552490234, - "learning_rate": 2.8017777777777784e-06, - "loss": 1.5912, - "step": 54620 - }, - { - "epoch": 1.74816, - "grad_norm": 46.67420196533203, - "learning_rate": 2.7982222222222226e-06, - "loss": 1.5763, - "step": 54630 - }, - { - "epoch": 1.74848, - "grad_norm": 44.51902770996094, - "learning_rate": 2.7946666666666667e-06, - "loss": 1.5819, - "step": 54640 - }, - { - "epoch": 1.7488000000000001, - "grad_norm": 48.27885437011719, - "learning_rate": 2.7911111111111113e-06, - "loss": 1.5972, - "step": 54650 - }, - { - "epoch": 1.74912, - "grad_norm": 45.42656707763672, - "learning_rate": 2.7875555555555554e-06, - "loss": 1.5925, - "step": 54660 - }, - { - "epoch": 1.7494399999999999, - "grad_norm": 44.766998291015625, - "learning_rate": 2.784e-06, - "loss": 1.6199, - "step": 54670 - }, - { - "epoch": 1.74976, - "grad_norm": 44.84164047241211, - "learning_rate": 2.7804444444444445e-06, - "loss": 1.5559, - "step": 54680 - }, - { - "epoch": 1.75008, - "grad_norm": 45.92261505126953, - "learning_rate": 2.776888888888889e-06, - "loss": 1.5874, - "step": 54690 - }, - { - "epoch": 1.7504, - "grad_norm": 45.73942565917969, - "learning_rate": 2.7733333333333336e-06, - "loss": 1.5739, - "step": 54700 - }, - { - "epoch": 1.75072, - "grad_norm": 47.31524658203125, - "learning_rate": 2.7697777777777777e-06, - "loss": 1.5882, - "step": 54710 - }, - { - "epoch": 1.7510400000000002, - "grad_norm": 46.772430419921875, - "learning_rate": 2.7662222222222223e-06, - "loss": 1.587, - "step": 54720 - }, - { - "epoch": 1.75136, - "grad_norm": 45.29179763793945, - "learning_rate": 2.762666666666667e-06, - "loss": 1.5967, - "step": 54730 - }, - { - "epoch": 1.75168, - "grad_norm": 46.218624114990234, - "learning_rate": 2.7591111111111114e-06, - "loss": 1.5756, - "step": 54740 - }, - { - "epoch": 1.752, - "grad_norm": 46.63479995727539, - "learning_rate": 2.755555555555556e-06, - "loss": 1.5881, - "step": 54750 - }, - { - "epoch": 1.75232, - "grad_norm": 46.75434112548828, - "learning_rate": 2.752e-06, - "loss": 1.5659, - "step": 54760 - }, - { - "epoch": 1.75264, - "grad_norm": 45.202186584472656, - "learning_rate": 2.7484444444444446e-06, - "loss": 1.605, - "step": 54770 - }, - { - "epoch": 1.7529599999999999, - "grad_norm": 44.8332633972168, - "learning_rate": 2.744888888888889e-06, - "loss": 1.5793, - "step": 54780 - }, - { - "epoch": 1.75328, - "grad_norm": 44.32758331298828, - "learning_rate": 2.7413333333333337e-06, - "loss": 1.5865, - "step": 54790 - }, - { - "epoch": 1.7536, - "grad_norm": 45.434085845947266, - "learning_rate": 2.7377777777777783e-06, - "loss": 1.5872, - "step": 54800 - }, - { - "epoch": 1.75392, - "grad_norm": 45.59672546386719, - "learning_rate": 2.7342222222222224e-06, - "loss": 1.5765, - 
"step": 54810 - }, - { - "epoch": 1.75424, - "grad_norm": 47.09999084472656, - "learning_rate": 2.730666666666667e-06, - "loss": 1.5781, - "step": 54820 - }, - { - "epoch": 1.7545600000000001, - "grad_norm": 46.603023529052734, - "learning_rate": 2.7271111111111115e-06, - "loss": 1.5639, - "step": 54830 - }, - { - "epoch": 1.75488, - "grad_norm": 43.42913055419922, - "learning_rate": 2.723555555555556e-06, - "loss": 1.5731, - "step": 54840 - }, - { - "epoch": 1.7551999999999999, - "grad_norm": 44.83296585083008, - "learning_rate": 2.7200000000000002e-06, - "loss": 1.5562, - "step": 54850 - }, - { - "epoch": 1.75552, - "grad_norm": 47.64613723754883, - "learning_rate": 2.7164444444444448e-06, - "loss": 1.57, - "step": 54860 - }, - { - "epoch": 1.75584, - "grad_norm": 47.252655029296875, - "learning_rate": 2.7128888888888893e-06, - "loss": 1.6299, - "step": 54870 - }, - { - "epoch": 1.75616, - "grad_norm": 48.46607208251953, - "learning_rate": 2.709333333333334e-06, - "loss": 1.5777, - "step": 54880 - }, - { - "epoch": 1.75648, - "grad_norm": 47.050479888916016, - "learning_rate": 2.7057777777777776e-06, - "loss": 1.5911, - "step": 54890 - }, - { - "epoch": 1.7568000000000001, - "grad_norm": 45.11897659301758, - "learning_rate": 2.702222222222222e-06, - "loss": 1.5852, - "step": 54900 - }, - { - "epoch": 1.75712, - "grad_norm": 44.798404693603516, - "learning_rate": 2.6986666666666667e-06, - "loss": 1.5997, - "step": 54910 - }, - { - "epoch": 1.75744, - "grad_norm": 45.404212951660156, - "learning_rate": 2.6951111111111112e-06, - "loss": 1.5861, - "step": 54920 - }, - { - "epoch": 1.75776, - "grad_norm": 45.11406326293945, - "learning_rate": 2.691555555555556e-06, - "loss": 1.5691, - "step": 54930 - }, - { - "epoch": 1.75808, - "grad_norm": 47.126953125, - "learning_rate": 2.688e-06, - "loss": 1.5937, - "step": 54940 - }, - { - "epoch": 1.7584, - "grad_norm": 49.89951705932617, - "learning_rate": 2.6844444444444445e-06, - "loss": 1.5766, - "step": 54950 - }, - { - "epoch": 1.7587199999999998, - "grad_norm": 45.07528305053711, - "learning_rate": 2.680888888888889e-06, - "loss": 1.5754, - "step": 54960 - }, - { - "epoch": 1.7590400000000002, - "grad_norm": 44.071266174316406, - "learning_rate": 2.6773333333333336e-06, - "loss": 1.6159, - "step": 54970 - }, - { - "epoch": 1.75936, - "grad_norm": 47.069217681884766, - "learning_rate": 2.6737777777777777e-06, - "loss": 1.6047, - "step": 54980 - }, - { - "epoch": 1.75968, - "grad_norm": 44.97282409667969, - "learning_rate": 2.6702222222222223e-06, - "loss": 1.5753, - "step": 54990 - }, - { - "epoch": 1.76, - "grad_norm": 46.30501937866211, - "learning_rate": 2.666666666666667e-06, - "loss": 1.6084, - "step": 55000 - }, - { - "epoch": 1.76032, - "grad_norm": 46.91132736206055, - "learning_rate": 2.6631111111111114e-06, - "loss": 1.601, - "step": 55010 - }, - { - "epoch": 1.76064, - "grad_norm": 47.901580810546875, - "learning_rate": 2.659555555555556e-06, - "loss": 1.5803, - "step": 55020 - }, - { - "epoch": 1.7609599999999999, - "grad_norm": 46.67458724975586, - "learning_rate": 2.656e-06, - "loss": 1.5893, - "step": 55030 - }, - { - "epoch": 1.76128, - "grad_norm": 45.30000686645508, - "learning_rate": 2.6524444444444446e-06, - "loss": 1.5668, - "step": 55040 - }, - { - "epoch": 1.7616, - "grad_norm": 47.15391159057617, - "learning_rate": 2.648888888888889e-06, - "loss": 1.5733, - "step": 55050 - }, - { - "epoch": 1.76192, - "grad_norm": 47.432796478271484, - "learning_rate": 2.6453333333333337e-06, - "loss": 1.6094, - "step": 55060 - }, - { - 
"epoch": 1.76224, - "grad_norm": 45.1340446472168, - "learning_rate": 2.6417777777777783e-06, - "loss": 1.5999, - "step": 55070 - }, - { - "epoch": 1.7625600000000001, - "grad_norm": 45.92671203613281, - "learning_rate": 2.6382222222222224e-06, - "loss": 1.6099, - "step": 55080 - }, - { - "epoch": 1.76288, - "grad_norm": 46.85237503051758, - "learning_rate": 2.634666666666667e-06, - "loss": 1.6062, - "step": 55090 - }, - { - "epoch": 1.7631999999999999, - "grad_norm": 44.259727478027344, - "learning_rate": 2.6311111111111115e-06, - "loss": 1.5687, - "step": 55100 - }, - { - "epoch": 1.76352, - "grad_norm": 47.20001983642578, - "learning_rate": 2.627555555555556e-06, - "loss": 1.5658, - "step": 55110 - }, - { - "epoch": 1.76384, - "grad_norm": 46.446468353271484, - "learning_rate": 2.6240000000000006e-06, - "loss": 1.6, - "step": 55120 - }, - { - "epoch": 1.76416, - "grad_norm": 48.24945831298828, - "learning_rate": 2.6204444444444448e-06, - "loss": 1.5677, - "step": 55130 - }, - { - "epoch": 1.76448, - "grad_norm": 45.008094787597656, - "learning_rate": 2.6168888888888893e-06, - "loss": 1.5802, - "step": 55140 - }, - { - "epoch": 1.7648000000000001, - "grad_norm": 44.15761947631836, - "learning_rate": 2.6133333333333334e-06, - "loss": 1.5379, - "step": 55150 - }, - { - "epoch": 1.76512, - "grad_norm": 45.80890655517578, - "learning_rate": 2.6097777777777776e-06, - "loss": 1.5746, - "step": 55160 - }, - { - "epoch": 1.76544, - "grad_norm": 44.99577713012695, - "learning_rate": 2.606222222222222e-06, - "loss": 1.5863, - "step": 55170 - }, - { - "epoch": 1.76576, - "grad_norm": 45.428462982177734, - "learning_rate": 2.6026666666666667e-06, - "loss": 1.5704, - "step": 55180 - }, - { - "epoch": 1.76608, - "grad_norm": 45.20389938354492, - "learning_rate": 2.5991111111111112e-06, - "loss": 1.5804, - "step": 55190 - }, - { - "epoch": 1.7664, - "grad_norm": 49.05274200439453, - "learning_rate": 2.5955555555555558e-06, - "loss": 1.5703, - "step": 55200 - }, - { - "epoch": 1.7667199999999998, - "grad_norm": 44.522544860839844, - "learning_rate": 2.592e-06, - "loss": 1.6099, - "step": 55210 - }, - { - "epoch": 1.7670400000000002, - "grad_norm": 46.331634521484375, - "learning_rate": 2.5884444444444445e-06, - "loss": 1.6057, - "step": 55220 - }, - { - "epoch": 1.76736, - "grad_norm": 46.33926773071289, - "learning_rate": 2.584888888888889e-06, - "loss": 1.591, - "step": 55230 - }, - { - "epoch": 1.76768, - "grad_norm": 46.363548278808594, - "learning_rate": 2.5813333333333336e-06, - "loss": 1.5776, - "step": 55240 - }, - { - "epoch": 1.768, - "grad_norm": 44.61825942993164, - "learning_rate": 2.577777777777778e-06, - "loss": 1.5853, - "step": 55250 - }, - { - "epoch": 1.7683200000000001, - "grad_norm": 45.92292404174805, - "learning_rate": 2.5742222222222223e-06, - "loss": 1.6115, - "step": 55260 - }, - { - "epoch": 1.76864, - "grad_norm": 46.37104415893555, - "learning_rate": 2.570666666666667e-06, - "loss": 1.6099, - "step": 55270 - }, - { - "epoch": 1.7689599999999999, - "grad_norm": 47.2564697265625, - "learning_rate": 2.5671111111111114e-06, - "loss": 1.5999, - "step": 55280 - }, - { - "epoch": 1.76928, - "grad_norm": 45.823089599609375, - "learning_rate": 2.563555555555556e-06, - "loss": 1.5867, - "step": 55290 - }, - { - "epoch": 1.7696, - "grad_norm": 45.80973815917969, - "learning_rate": 2.56e-06, - "loss": 1.5808, - "step": 55300 - }, - { - "epoch": 1.76992, - "grad_norm": 45.57069778442383, - "learning_rate": 2.5564444444444446e-06, - "loss": 1.6039, - "step": 55310 - }, - { - "epoch": 
1.77024, - "grad_norm": 48.02677917480469, - "learning_rate": 2.552888888888889e-06, - "loss": 1.5714, - "step": 55320 - }, - { - "epoch": 1.7705600000000001, - "grad_norm": 46.72894287109375, - "learning_rate": 2.5493333333333337e-06, - "loss": 1.6051, - "step": 55330 - }, - { - "epoch": 1.77088, - "grad_norm": 47.3582763671875, - "learning_rate": 2.5457777777777783e-06, - "loss": 1.5619, - "step": 55340 - }, - { - "epoch": 1.7711999999999999, - "grad_norm": 47.09748077392578, - "learning_rate": 2.5422222222222224e-06, - "loss": 1.5879, - "step": 55350 - }, - { - "epoch": 1.77152, - "grad_norm": 45.226200103759766, - "learning_rate": 2.538666666666667e-06, - "loss": 1.5996, - "step": 55360 - }, - { - "epoch": 1.77184, - "grad_norm": 46.5212287902832, - "learning_rate": 2.5351111111111115e-06, - "loss": 1.5795, - "step": 55370 - }, - { - "epoch": 1.77216, - "grad_norm": 45.95960235595703, - "learning_rate": 2.531555555555556e-06, - "loss": 1.5892, - "step": 55380 - }, - { - "epoch": 1.77248, - "grad_norm": 47.54762649536133, - "learning_rate": 2.5280000000000006e-06, - "loss": 1.5966, - "step": 55390 - }, - { - "epoch": 1.7728000000000002, - "grad_norm": 46.98671340942383, - "learning_rate": 2.5244444444444447e-06, - "loss": 1.5964, - "step": 55400 - }, - { - "epoch": 1.77312, - "grad_norm": 44.890193939208984, - "learning_rate": 2.520888888888889e-06, - "loss": 1.5641, - "step": 55410 - }, - { - "epoch": 1.77344, - "grad_norm": 46.220298767089844, - "learning_rate": 2.5173333333333334e-06, - "loss": 1.5601, - "step": 55420 - }, - { - "epoch": 1.77376, - "grad_norm": 46.14614486694336, - "learning_rate": 2.5137777777777776e-06, - "loss": 1.5798, - "step": 55430 - }, - { - "epoch": 1.77408, - "grad_norm": 46.30167770385742, - "learning_rate": 2.510222222222222e-06, - "loss": 1.6126, - "step": 55440 - }, - { - "epoch": 1.7744, - "grad_norm": 46.571044921875, - "learning_rate": 2.5066666666666667e-06, - "loss": 1.6008, - "step": 55450 - }, - { - "epoch": 1.7747199999999999, - "grad_norm": 46.22939682006836, - "learning_rate": 2.5031111111111112e-06, - "loss": 1.59, - "step": 55460 - }, - { - "epoch": 1.77504, - "grad_norm": 46.0821533203125, - "learning_rate": 2.4995555555555558e-06, - "loss": 1.604, - "step": 55470 - }, - { - "epoch": 1.77536, - "grad_norm": 44.83059310913086, - "learning_rate": 2.496e-06, - "loss": 1.5583, - "step": 55480 - }, - { - "epoch": 1.77568, - "grad_norm": 45.116336822509766, - "learning_rate": 2.4924444444444445e-06, - "loss": 1.5915, - "step": 55490 - }, - { - "epoch": 1.776, - "grad_norm": 45.40968704223633, - "learning_rate": 2.488888888888889e-06, - "loss": 1.5715, - "step": 55500 - }, - { - "epoch": 1.7763200000000001, - "grad_norm": 47.14876937866211, - "learning_rate": 2.4853333333333336e-06, - "loss": 1.5946, - "step": 55510 - }, - { - "epoch": 1.77664, - "grad_norm": 45.58320617675781, - "learning_rate": 2.481777777777778e-06, - "loss": 1.5863, - "step": 55520 - }, - { - "epoch": 1.7769599999999999, - "grad_norm": 44.24687576293945, - "learning_rate": 2.4782222222222222e-06, - "loss": 1.6072, - "step": 55530 - }, - { - "epoch": 1.77728, - "grad_norm": 44.717613220214844, - "learning_rate": 2.474666666666667e-06, - "loss": 1.5911, - "step": 55540 - }, - { - "epoch": 1.7776, - "grad_norm": 45.04279327392578, - "learning_rate": 2.4711111111111114e-06, - "loss": 1.5845, - "step": 55550 - }, - { - "epoch": 1.77792, - "grad_norm": 46.93659591674805, - "learning_rate": 2.467555555555556e-06, - "loss": 1.578, - "step": 55560 - }, - { - "epoch": 1.77824, - 
"grad_norm": 44.31327819824219, - "learning_rate": 2.4640000000000005e-06, - "loss": 1.5721, - "step": 55570 - }, - { - "epoch": 1.7785600000000001, - "grad_norm": 45.77568817138672, - "learning_rate": 2.4604444444444446e-06, - "loss": 1.5834, - "step": 55580 - }, - { - "epoch": 1.77888, - "grad_norm": 44.93718338012695, - "learning_rate": 2.456888888888889e-06, - "loss": 1.5598, - "step": 55590 - }, - { - "epoch": 1.7792, - "grad_norm": 47.456947326660156, - "learning_rate": 2.4533333333333333e-06, - "loss": 1.5909, - "step": 55600 - }, - { - "epoch": 1.77952, - "grad_norm": 46.9789924621582, - "learning_rate": 2.449777777777778e-06, - "loss": 1.5777, - "step": 55610 - }, - { - "epoch": 1.77984, - "grad_norm": 53.498592376708984, - "learning_rate": 2.4462222222222224e-06, - "loss": 1.5873, - "step": 55620 - }, - { - "epoch": 1.78016, - "grad_norm": 47.94544982910156, - "learning_rate": 2.442666666666667e-06, - "loss": 1.5902, - "step": 55630 - }, - { - "epoch": 1.7804799999999998, - "grad_norm": 44.35692596435547, - "learning_rate": 2.439111111111111e-06, - "loss": 1.5891, - "step": 55640 - }, - { - "epoch": 1.7808000000000002, - "grad_norm": 45.31683349609375, - "learning_rate": 2.4355555555555556e-06, - "loss": 1.5967, - "step": 55650 - }, - { - "epoch": 1.78112, - "grad_norm": 46.0125732421875, - "learning_rate": 2.432e-06, - "loss": 1.5759, - "step": 55660 - }, - { - "epoch": 1.78144, - "grad_norm": 45.48175811767578, - "learning_rate": 2.4284444444444447e-06, - "loss": 1.5829, - "step": 55670 - }, - { - "epoch": 1.78176, - "grad_norm": 45.606834411621094, - "learning_rate": 2.4248888888888893e-06, - "loss": 1.5964, - "step": 55680 - }, - { - "epoch": 1.78208, - "grad_norm": 61.35505294799805, - "learning_rate": 2.4213333333333334e-06, - "loss": 1.596, - "step": 55690 - }, - { - "epoch": 1.7824, - "grad_norm": 46.245399475097656, - "learning_rate": 2.417777777777778e-06, - "loss": 1.5756, - "step": 55700 - }, - { - "epoch": 1.7827199999999999, - "grad_norm": 45.621490478515625, - "learning_rate": 2.4142222222222225e-06, - "loss": 1.6155, - "step": 55710 - }, - { - "epoch": 1.78304, - "grad_norm": 46.63582992553711, - "learning_rate": 2.410666666666667e-06, - "loss": 1.61, - "step": 55720 - }, - { - "epoch": 1.78336, - "grad_norm": 46.067195892333984, - "learning_rate": 2.407111111111111e-06, - "loss": 1.6075, - "step": 55730 - }, - { - "epoch": 1.78368, - "grad_norm": 47.03789520263672, - "learning_rate": 2.4035555555555558e-06, - "loss": 1.6083, - "step": 55740 - }, - { - "epoch": 1.784, - "grad_norm": 43.586143493652344, - "learning_rate": 2.4000000000000003e-06, - "loss": 1.5847, - "step": 55750 - }, - { - "epoch": 1.7843200000000001, - "grad_norm": 46.635459899902344, - "learning_rate": 2.3964444444444444e-06, - "loss": 1.5636, - "step": 55760 - }, - { - "epoch": 1.78464, - "grad_norm": 47.186370849609375, - "learning_rate": 2.392888888888889e-06, - "loss": 1.5964, - "step": 55770 - }, - { - "epoch": 1.7849599999999999, - "grad_norm": 45.05913543701172, - "learning_rate": 2.3893333333333336e-06, - "loss": 1.589, - "step": 55780 - }, - { - "epoch": 1.78528, - "grad_norm": 45.96673583984375, - "learning_rate": 2.385777777777778e-06, - "loss": 1.5975, - "step": 55790 - }, - { - "epoch": 1.7856, - "grad_norm": 49.0675163269043, - "learning_rate": 2.3822222222222222e-06, - "loss": 1.6115, - "step": 55800 - }, - { - "epoch": 1.78592, - "grad_norm": 45.164005279541016, - "learning_rate": 2.378666666666667e-06, - "loss": 1.573, - "step": 55810 - }, - { - "epoch": 1.78624, - "grad_norm": 
47.418373107910156, - "learning_rate": 2.3751111111111113e-06, - "loss": 1.5898, - "step": 55820 - }, - { - "epoch": 1.7865600000000001, - "grad_norm": 47.338897705078125, - "learning_rate": 2.371555555555556e-06, - "loss": 1.5831, - "step": 55830 - }, - { - "epoch": 1.78688, - "grad_norm": 46.223323822021484, - "learning_rate": 2.3680000000000005e-06, - "loss": 1.5614, - "step": 55840 - }, - { - "epoch": 1.7872, - "grad_norm": 46.24585723876953, - "learning_rate": 2.3644444444444446e-06, - "loss": 1.5753, - "step": 55850 - }, - { - "epoch": 1.78752, - "grad_norm": 45.412471771240234, - "learning_rate": 2.360888888888889e-06, - "loss": 1.5642, - "step": 55860 - }, - { - "epoch": 1.78784, - "grad_norm": 46.19806671142578, - "learning_rate": 2.3573333333333333e-06, - "loss": 1.6342, - "step": 55870 - }, - { - "epoch": 1.78816, - "grad_norm": 45.83122634887695, - "learning_rate": 2.353777777777778e-06, - "loss": 1.5747, - "step": 55880 - }, - { - "epoch": 1.7884799999999998, - "grad_norm": 46.43621826171875, - "learning_rate": 2.3502222222222224e-06, - "loss": 1.6143, - "step": 55890 - }, - { - "epoch": 1.7888, - "grad_norm": 46.50686264038086, - "learning_rate": 2.346666666666667e-06, - "loss": 1.6208, - "step": 55900 - }, - { - "epoch": 1.78912, - "grad_norm": 46.26643371582031, - "learning_rate": 2.3431111111111115e-06, - "loss": 1.6048, - "step": 55910 - }, - { - "epoch": 1.78944, - "grad_norm": 46.18266677856445, - "learning_rate": 2.3395555555555556e-06, - "loss": 1.5608, - "step": 55920 - }, - { - "epoch": 1.78976, - "grad_norm": 44.978973388671875, - "learning_rate": 2.336e-06, - "loss": 1.5868, - "step": 55930 - }, - { - "epoch": 1.7900800000000001, - "grad_norm": 46.06013107299805, - "learning_rate": 2.3324444444444447e-06, - "loss": 1.6047, - "step": 55940 - }, - { - "epoch": 1.7904, - "grad_norm": 45.7539176940918, - "learning_rate": 2.3288888888888893e-06, - "loss": 1.5809, - "step": 55950 - }, - { - "epoch": 1.7907199999999999, - "grad_norm": 44.96236801147461, - "learning_rate": 2.3253333333333334e-06, - "loss": 1.5617, - "step": 55960 - }, - { - "epoch": 1.79104, - "grad_norm": 46.3323974609375, - "learning_rate": 2.321777777777778e-06, - "loss": 1.5557, - "step": 55970 - }, - { - "epoch": 1.79136, - "grad_norm": 45.12177276611328, - "learning_rate": 2.3182222222222225e-06, - "loss": 1.6195, - "step": 55980 - }, - { - "epoch": 1.79168, - "grad_norm": 46.304664611816406, - "learning_rate": 2.3146666666666666e-06, - "loss": 1.5838, - "step": 55990 - }, - { - "epoch": 1.792, - "grad_norm": 46.32132339477539, - "learning_rate": 2.311111111111111e-06, - "loss": 1.5968, - "step": 56000 - }, - { - "epoch": 1.7923200000000001, - "grad_norm": 45.86129379272461, - "learning_rate": 2.3075555555555557e-06, - "loss": 1.5952, - "step": 56010 - }, - { - "epoch": 1.79264, - "grad_norm": 46.15711212158203, - "learning_rate": 2.3040000000000003e-06, - "loss": 1.568, - "step": 56020 - }, - { - "epoch": 1.7929599999999999, - "grad_norm": 46.497249603271484, - "learning_rate": 2.3004444444444444e-06, - "loss": 1.6112, - "step": 56030 - }, - { - "epoch": 1.79328, - "grad_norm": 45.105045318603516, - "learning_rate": 2.296888888888889e-06, - "loss": 1.6041, - "step": 56040 - }, - { - "epoch": 1.7936, - "grad_norm": 44.266658782958984, - "learning_rate": 2.2933333333333335e-06, - "loss": 1.5485, - "step": 56050 - }, - { - "epoch": 1.79392, - "grad_norm": 45.53001022338867, - "learning_rate": 2.289777777777778e-06, - "loss": 1.606, - "step": 56060 - }, - { - "epoch": 1.7942399999999998, - "grad_norm": 
47.24198913574219, - "learning_rate": 2.2862222222222226e-06, - "loss": 1.5648, - "step": 56070 - }, - { - "epoch": 1.7945600000000002, - "grad_norm": 46.86454391479492, - "learning_rate": 2.2826666666666668e-06, - "loss": 1.5816, - "step": 56080 - }, - { - "epoch": 1.79488, - "grad_norm": 47.774723052978516, - "learning_rate": 2.2791111111111113e-06, - "loss": 1.6021, - "step": 56090 - }, - { - "epoch": 1.7952, - "grad_norm": 45.98683166503906, - "learning_rate": 2.275555555555556e-06, - "loss": 1.5843, - "step": 56100 - }, - { - "epoch": 1.79552, - "grad_norm": 45.332523345947266, - "learning_rate": 2.2720000000000004e-06, - "loss": 1.6126, - "step": 56110 - }, - { - "epoch": 1.79584, - "grad_norm": 46.97977066040039, - "learning_rate": 2.2684444444444446e-06, - "loss": 1.5895, - "step": 56120 - }, - { - "epoch": 1.79616, - "grad_norm": 45.19950866699219, - "learning_rate": 2.264888888888889e-06, - "loss": 1.6076, - "step": 56130 - }, - { - "epoch": 1.7964799999999999, - "grad_norm": 46.60654830932617, - "learning_rate": 2.2613333333333333e-06, - "loss": 1.5804, - "step": 56140 - }, - { - "epoch": 1.7968, - "grad_norm": 43.94088363647461, - "learning_rate": 2.257777777777778e-06, - "loss": 1.5936, - "step": 56150 - }, - { - "epoch": 1.79712, - "grad_norm": 46.990787506103516, - "learning_rate": 2.2542222222222224e-06, - "loss": 1.617, - "step": 56160 - }, - { - "epoch": 1.79744, - "grad_norm": 45.92596435546875, - "learning_rate": 2.250666666666667e-06, - "loss": 1.59, - "step": 56170 - }, - { - "epoch": 1.79776, - "grad_norm": 45.383148193359375, - "learning_rate": 2.2471111111111115e-06, - "loss": 1.5845, - "step": 56180 - }, - { - "epoch": 1.7980800000000001, - "grad_norm": 45.40883255004883, - "learning_rate": 2.2435555555555556e-06, - "loss": 1.5849, - "step": 56190 - }, - { - "epoch": 1.7984, - "grad_norm": 45.579917907714844, - "learning_rate": 2.24e-06, - "loss": 1.5701, - "step": 56200 - }, - { - "epoch": 1.7987199999999999, - "grad_norm": 44.44224548339844, - "learning_rate": 2.2364444444444447e-06, - "loss": 1.5925, - "step": 56210 - }, - { - "epoch": 1.79904, - "grad_norm": 45.03302764892578, - "learning_rate": 2.2328888888888893e-06, - "loss": 1.5808, - "step": 56220 - }, - { - "epoch": 1.79936, - "grad_norm": 46.868614196777344, - "learning_rate": 2.229333333333334e-06, - "loss": 1.5644, - "step": 56230 - }, - { - "epoch": 1.79968, - "grad_norm": 45.28606033325195, - "learning_rate": 2.225777777777778e-06, - "loss": 1.5832, - "step": 56240 - }, - { - "epoch": 1.8, - "grad_norm": 45.76778793334961, - "learning_rate": 2.222222222222222e-06, - "loss": 1.587, - "step": 56250 - }, - { - "epoch": 1.8003200000000001, - "grad_norm": 44.555789947509766, - "learning_rate": 2.2186666666666666e-06, - "loss": 1.5767, - "step": 56260 - }, - { - "epoch": 1.80064, - "grad_norm": 45.19745635986328, - "learning_rate": 2.215111111111111e-06, - "loss": 1.6335, - "step": 56270 - }, - { - "epoch": 1.80096, - "grad_norm": 44.41781997680664, - "learning_rate": 2.2115555555555557e-06, - "loss": 1.6064, - "step": 56280 - }, - { - "epoch": 1.80128, - "grad_norm": 45.24692916870117, - "learning_rate": 2.2080000000000003e-06, - "loss": 1.5886, - "step": 56290 - }, - { - "epoch": 1.8016, - "grad_norm": 44.298160552978516, - "learning_rate": 2.2044444444444444e-06, - "loss": 1.5731, - "step": 56300 - }, - { - "epoch": 1.80192, - "grad_norm": 48.903236389160156, - "learning_rate": 2.200888888888889e-06, - "loss": 1.5567, - "step": 56310 - }, - { - "epoch": 1.8022399999999998, - "grad_norm": 
44.9493293762207, - "learning_rate": 2.1973333333333335e-06, - "loss": 1.5892, - "step": 56320 - }, - { - "epoch": 1.8025600000000002, - "grad_norm": 46.3359489440918, - "learning_rate": 2.193777777777778e-06, - "loss": 1.5631, - "step": 56330 - }, - { - "epoch": 1.80288, - "grad_norm": 46.73094940185547, - "learning_rate": 2.1902222222222226e-06, - "loss": 1.5904, - "step": 56340 - }, - { - "epoch": 1.8032, - "grad_norm": 47.31883239746094, - "learning_rate": 2.1866666666666668e-06, - "loss": 1.5641, - "step": 56350 - }, - { - "epoch": 1.80352, - "grad_norm": 49.820865631103516, - "learning_rate": 2.1831111111111113e-06, - "loss": 1.6101, - "step": 56360 - }, - { - "epoch": 1.80384, - "grad_norm": 46.607112884521484, - "learning_rate": 2.179555555555556e-06, - "loss": 1.5932, - "step": 56370 - }, - { - "epoch": 1.80416, - "grad_norm": 45.860992431640625, - "learning_rate": 2.176e-06, - "loss": 1.5899, - "step": 56380 - }, - { - "epoch": 1.8044799999999999, - "grad_norm": 44.20576477050781, - "learning_rate": 2.1724444444444446e-06, - "loss": 1.5934, - "step": 56390 - }, - { - "epoch": 1.8048, - "grad_norm": 45.662288665771484, - "learning_rate": 2.168888888888889e-06, - "loss": 1.5845, - "step": 56400 - }, - { - "epoch": 1.80512, - "grad_norm": 46.737857818603516, - "learning_rate": 2.1653333333333337e-06, - "loss": 1.5815, - "step": 56410 - }, - { - "epoch": 1.80544, - "grad_norm": 46.261051177978516, - "learning_rate": 2.161777777777778e-06, - "loss": 1.5693, - "step": 56420 - }, - { - "epoch": 1.80576, - "grad_norm": 45.66044235229492, - "learning_rate": 2.1582222222222223e-06, - "loss": 1.6025, - "step": 56430 - }, - { - "epoch": 1.8060800000000001, - "grad_norm": 45.01734924316406, - "learning_rate": 2.154666666666667e-06, - "loss": 1.5983, - "step": 56440 - }, - { - "epoch": 1.8064, - "grad_norm": 45.47689437866211, - "learning_rate": 2.1511111111111115e-06, - "loss": 1.6084, - "step": 56450 - }, - { - "epoch": 1.8067199999999999, - "grad_norm": 45.470767974853516, - "learning_rate": 2.1475555555555556e-06, - "loss": 1.584, - "step": 56460 - }, - { - "epoch": 1.80704, - "grad_norm": 45.65459060668945, - "learning_rate": 2.144e-06, - "loss": 1.5848, - "step": 56470 - }, - { - "epoch": 1.80736, - "grad_norm": 46.29679489135742, - "learning_rate": 2.1404444444444447e-06, - "loss": 1.5596, - "step": 56480 - }, - { - "epoch": 1.80768, - "grad_norm": 43.89360046386719, - "learning_rate": 2.1368888888888892e-06, - "loss": 1.5724, - "step": 56490 - }, - { - "epoch": 1.808, - "grad_norm": 44.68407440185547, - "learning_rate": 2.133333333333334e-06, - "loss": 1.5904, - "step": 56500 - }, - { - "epoch": 1.8083200000000001, - "grad_norm": 44.551185607910156, - "learning_rate": 2.129777777777778e-06, - "loss": 1.5921, - "step": 56510 - }, - { - "epoch": 1.80864, - "grad_norm": 47.263126373291016, - "learning_rate": 2.1262222222222225e-06, - "loss": 1.5847, - "step": 56520 - }, - { - "epoch": 1.80896, - "grad_norm": 45.34291458129883, - "learning_rate": 2.1226666666666666e-06, - "loss": 1.6079, - "step": 56530 - }, - { - "epoch": 1.80928, - "grad_norm": 46.09779739379883, - "learning_rate": 2.119111111111111e-06, - "loss": 1.6251, - "step": 56540 - }, - { - "epoch": 1.8096, - "grad_norm": 46.95382308959961, - "learning_rate": 2.1155555555555557e-06, - "loss": 1.5672, - "step": 56550 - }, - { - "epoch": 1.80992, - "grad_norm": 45.27779006958008, - "learning_rate": 2.1120000000000003e-06, - "loss": 1.5892, - "step": 56560 - }, - { - "epoch": 1.8102399999999998, - "grad_norm": 44.9322624206543, - 
"learning_rate": 2.108444444444445e-06, - "loss": 1.5847, - "step": 56570 - }, - { - "epoch": 1.81056, - "grad_norm": 48.82658004760742, - "learning_rate": 2.104888888888889e-06, - "loss": 1.5899, - "step": 56580 - }, - { - "epoch": 1.81088, - "grad_norm": 48.06019973754883, - "learning_rate": 2.1013333333333335e-06, - "loss": 1.5513, - "step": 56590 - }, - { - "epoch": 1.8112, - "grad_norm": 45.88014221191406, - "learning_rate": 2.097777777777778e-06, - "loss": 1.5952, - "step": 56600 - }, - { - "epoch": 1.81152, - "grad_norm": 42.34318161010742, - "learning_rate": 2.0942222222222226e-06, - "loss": 1.5755, - "step": 56610 - }, - { - "epoch": 1.8118400000000001, - "grad_norm": 45.93275451660156, - "learning_rate": 2.0906666666666668e-06, - "loss": 1.5776, - "step": 56620 - }, - { - "epoch": 1.81216, - "grad_norm": 43.69744873046875, - "learning_rate": 2.0871111111111113e-06, - "loss": 1.5893, - "step": 56630 - }, - { - "epoch": 1.8124799999999999, - "grad_norm": 44.21855926513672, - "learning_rate": 2.0835555555555554e-06, - "loss": 1.5874, - "step": 56640 - }, - { - "epoch": 1.8128, - "grad_norm": 45.70142364501953, - "learning_rate": 2.08e-06, - "loss": 1.5684, - "step": 56650 - }, - { - "epoch": 1.81312, - "grad_norm": 46.35956573486328, - "learning_rate": 2.0764444444444445e-06, - "loss": 1.6093, - "step": 56660 - }, - { - "epoch": 1.81344, - "grad_norm": 47.33995056152344, - "learning_rate": 2.072888888888889e-06, - "loss": 1.5873, - "step": 56670 - }, - { - "epoch": 1.81376, - "grad_norm": 45.1085205078125, - "learning_rate": 2.0693333333333337e-06, - "loss": 1.5867, - "step": 56680 - }, - { - "epoch": 1.8140800000000001, - "grad_norm": 46.70127868652344, - "learning_rate": 2.0657777777777778e-06, - "loss": 1.6007, - "step": 56690 - }, - { - "epoch": 1.8144, - "grad_norm": 45.99809646606445, - "learning_rate": 2.0622222222222223e-06, - "loss": 1.6079, - "step": 56700 - }, - { - "epoch": 1.8147199999999999, - "grad_norm": 46.1832275390625, - "learning_rate": 2.058666666666667e-06, - "loss": 1.6042, - "step": 56710 - }, - { - "epoch": 1.81504, - "grad_norm": 47.55323791503906, - "learning_rate": 2.0551111111111114e-06, - "loss": 1.5965, - "step": 56720 - }, - { - "epoch": 1.81536, - "grad_norm": 45.24617004394531, - "learning_rate": 2.051555555555556e-06, - "loss": 1.5797, - "step": 56730 - }, - { - "epoch": 1.81568, - "grad_norm": 44.48154067993164, - "learning_rate": 2.048e-06, - "loss": 1.568, - "step": 56740 - }, - { - "epoch": 1.8159999999999998, - "grad_norm": 47.31721878051758, - "learning_rate": 2.0444444444444447e-06, - "loss": 1.5819, - "step": 56750 - }, - { - "epoch": 1.8163200000000002, - "grad_norm": 58.64398193359375, - "learning_rate": 2.040888888888889e-06, - "loss": 1.5612, - "step": 56760 - }, - { - "epoch": 1.81664, - "grad_norm": 48.63230514526367, - "learning_rate": 2.0373333333333334e-06, - "loss": 1.6083, - "step": 56770 - }, - { - "epoch": 1.81696, - "grad_norm": 47.880393981933594, - "learning_rate": 2.033777777777778e-06, - "loss": 1.5927, - "step": 56780 - }, - { - "epoch": 1.81728, - "grad_norm": 45.82305145263672, - "learning_rate": 2.0302222222222225e-06, - "loss": 1.5959, - "step": 56790 - }, - { - "epoch": 1.8176, - "grad_norm": 44.46937561035156, - "learning_rate": 2.0266666666666666e-06, - "loss": 1.6333, - "step": 56800 - }, - { - "epoch": 1.81792, - "grad_norm": 45.742794036865234, - "learning_rate": 2.023111111111111e-06, - "loss": 1.5872, - "step": 56810 - }, - { - "epoch": 1.8182399999999999, - "grad_norm": 45.73196029663086, - "learning_rate": 
2.0195555555555557e-06, - "loss": 1.5682, - "step": 56820 - }, - { - "epoch": 1.81856, - "grad_norm": 45.85786437988281, - "learning_rate": 2.0160000000000003e-06, - "loss": 1.5906, - "step": 56830 - }, - { - "epoch": 1.81888, - "grad_norm": 45.84141159057617, - "learning_rate": 2.012444444444445e-06, - "loss": 1.5962, - "step": 56840 - }, - { - "epoch": 1.8192, - "grad_norm": 45.378883361816406, - "learning_rate": 2.008888888888889e-06, - "loss": 1.5749, - "step": 56850 - }, - { - "epoch": 1.81952, - "grad_norm": 46.715023040771484, - "learning_rate": 2.0053333333333335e-06, - "loss": 1.5586, - "step": 56860 - }, - { - "epoch": 1.8198400000000001, - "grad_norm": 45.816993713378906, - "learning_rate": 2.001777777777778e-06, - "loss": 1.5638, - "step": 56870 - }, - { - "epoch": 1.82016, - "grad_norm": 44.63705062866211, - "learning_rate": 1.9982222222222226e-06, - "loss": 1.5921, - "step": 56880 - }, - { - "epoch": 1.8204799999999999, - "grad_norm": 45.8122444152832, - "learning_rate": 1.9946666666666667e-06, - "loss": 1.6025, - "step": 56890 - }, - { - "epoch": 1.8208, - "grad_norm": 45.895965576171875, - "learning_rate": 1.9911111111111113e-06, - "loss": 1.5821, - "step": 56900 - }, - { - "epoch": 1.82112, - "grad_norm": 45.55632019042969, - "learning_rate": 1.9875555555555554e-06, - "loss": 1.6013, - "step": 56910 - }, - { - "epoch": 1.82144, - "grad_norm": 46.17961883544922, - "learning_rate": 1.984e-06, - "loss": 1.5889, - "step": 56920 - }, - { - "epoch": 1.82176, - "grad_norm": 46.277687072753906, - "learning_rate": 1.9804444444444445e-06, - "loss": 1.5787, - "step": 56930 - }, - { - "epoch": 1.8220800000000001, - "grad_norm": 45.59478759765625, - "learning_rate": 1.976888888888889e-06, - "loss": 1.588, - "step": 56940 - }, - { - "epoch": 1.8224, - "grad_norm": 45.2066764831543, - "learning_rate": 1.9733333333333336e-06, - "loss": 1.5655, - "step": 56950 - }, - { - "epoch": 1.82272, - "grad_norm": 44.63209533691406, - "learning_rate": 1.9697777777777778e-06, - "loss": 1.574, - "step": 56960 - }, - { - "epoch": 1.82304, - "grad_norm": 47.10441589355469, - "learning_rate": 1.9662222222222223e-06, - "loss": 1.5783, - "step": 56970 - }, - { - "epoch": 1.82336, - "grad_norm": 45.74570083618164, - "learning_rate": 1.962666666666667e-06, - "loss": 1.5658, - "step": 56980 - }, - { - "epoch": 1.82368, - "grad_norm": 44.77720642089844, - "learning_rate": 1.9591111111111114e-06, - "loss": 1.5996, - "step": 56990 - }, - { - "epoch": 1.8239999999999998, - "grad_norm": 46.88585662841797, - "learning_rate": 1.955555555555556e-06, - "loss": 1.6126, - "step": 57000 - }, - { - "epoch": 1.8243200000000002, - "grad_norm": 47.91762161254883, - "learning_rate": 1.952e-06, - "loss": 1.5885, - "step": 57010 - }, - { - "epoch": 1.82464, - "grad_norm": 45.013240814208984, - "learning_rate": 1.9484444444444447e-06, - "loss": 1.5731, - "step": 57020 - }, - { - "epoch": 1.82496, - "grad_norm": 44.162113189697266, - "learning_rate": 1.944888888888889e-06, - "loss": 1.6139, - "step": 57030 - }, - { - "epoch": 1.82528, - "grad_norm": 46.55525588989258, - "learning_rate": 1.9413333333333334e-06, - "loss": 1.5635, - "step": 57040 - }, - { - "epoch": 1.8256000000000001, - "grad_norm": 46.97242736816406, - "learning_rate": 1.937777777777778e-06, - "loss": 1.5949, - "step": 57050 - }, - { - "epoch": 1.82592, - "grad_norm": 44.33784484863281, - "learning_rate": 1.9342222222222225e-06, - "loss": 1.5636, - "step": 57060 - }, - { - "epoch": 1.8262399999999999, - "grad_norm": 44.85808181762695, - "learning_rate": 
1.9306666666666666e-06, - "loss": 1.599, - "step": 57070 - }, - { - "epoch": 1.82656, - "grad_norm": 48.044742584228516, - "learning_rate": 1.927111111111111e-06, - "loss": 1.5846, - "step": 57080 - }, - { - "epoch": 1.82688, - "grad_norm": 48.76578140258789, - "learning_rate": 1.9235555555555557e-06, - "loss": 1.5993, - "step": 57090 - }, - { - "epoch": 1.8272, - "grad_norm": 46.712890625, - "learning_rate": 1.9200000000000003e-06, - "loss": 1.5997, - "step": 57100 - }, - { - "epoch": 1.82752, - "grad_norm": 44.625770568847656, - "learning_rate": 1.916444444444445e-06, - "loss": 1.5811, - "step": 57110 - }, - { - "epoch": 1.8278400000000001, - "grad_norm": 47.16094970703125, - "learning_rate": 1.912888888888889e-06, - "loss": 1.5732, - "step": 57120 - }, - { - "epoch": 1.82816, - "grad_norm": 46.85769271850586, - "learning_rate": 1.9093333333333335e-06, - "loss": 1.5915, - "step": 57130 - }, - { - "epoch": 1.8284799999999999, - "grad_norm": 46.92313766479492, - "learning_rate": 1.905777777777778e-06, - "loss": 1.5702, - "step": 57140 - }, - { - "epoch": 1.8288, - "grad_norm": 46.07356643676758, - "learning_rate": 1.9022222222222222e-06, - "loss": 1.6097, - "step": 57150 - }, - { - "epoch": 1.82912, - "grad_norm": 46.94747543334961, - "learning_rate": 1.8986666666666667e-06, - "loss": 1.588, - "step": 57160 - }, - { - "epoch": 1.82944, - "grad_norm": 46.5937614440918, - "learning_rate": 1.8951111111111113e-06, - "loss": 1.6113, - "step": 57170 - }, - { - "epoch": 1.82976, - "grad_norm": 47.07693862915039, - "learning_rate": 1.8915555555555556e-06, - "loss": 1.5907, - "step": 57180 - }, - { - "epoch": 1.8300800000000002, - "grad_norm": 46.30846405029297, - "learning_rate": 1.8880000000000002e-06, - "loss": 1.591, - "step": 57190 - }, - { - "epoch": 1.8304, - "grad_norm": 48.294960021972656, - "learning_rate": 1.8844444444444445e-06, - "loss": 1.5888, - "step": 57200 - }, - { - "epoch": 1.83072, - "grad_norm": 45.11846160888672, - "learning_rate": 1.880888888888889e-06, - "loss": 1.579, - "step": 57210 - }, - { - "epoch": 1.83104, - "grad_norm": 44.790489196777344, - "learning_rate": 1.8773333333333334e-06, - "loss": 1.591, - "step": 57220 - }, - { - "epoch": 1.83136, - "grad_norm": 45.15334701538086, - "learning_rate": 1.873777777777778e-06, - "loss": 1.61, - "step": 57230 - }, - { - "epoch": 1.83168, - "grad_norm": 46.32352828979492, - "learning_rate": 1.8702222222222225e-06, - "loss": 1.61, - "step": 57240 - }, - { - "epoch": 1.8319999999999999, - "grad_norm": 46.18299865722656, - "learning_rate": 1.8666666666666669e-06, - "loss": 1.5517, - "step": 57250 - }, - { - "epoch": 1.83232, - "grad_norm": 44.044254302978516, - "learning_rate": 1.8631111111111114e-06, - "loss": 1.5758, - "step": 57260 - }, - { - "epoch": 1.83264, - "grad_norm": 46.72759246826172, - "learning_rate": 1.8595555555555558e-06, - "loss": 1.6195, - "step": 57270 - }, - { - "epoch": 1.83296, - "grad_norm": 45.010780334472656, - "learning_rate": 1.856e-06, - "loss": 1.5719, - "step": 57280 - }, - { - "epoch": 1.83328, - "grad_norm": 44.996883392333984, - "learning_rate": 1.8524444444444444e-06, - "loss": 1.5723, - "step": 57290 - }, - { - "epoch": 1.8336000000000001, - "grad_norm": 46.22783660888672, - "learning_rate": 1.848888888888889e-06, - "loss": 1.6152, - "step": 57300 - }, - { - "epoch": 1.83392, - "grad_norm": 45.228973388671875, - "learning_rate": 1.8453333333333333e-06, - "loss": 1.6088, - "step": 57310 - }, - { - "epoch": 1.8342399999999999, - "grad_norm": 46.14060974121094, - "learning_rate": 
1.8417777777777779e-06, - "loss": 1.5891, - "step": 57320 - }, - { - "epoch": 1.83456, - "grad_norm": 46.00922775268555, - "learning_rate": 1.8382222222222224e-06, - "loss": 1.5811, - "step": 57330 - }, - { - "epoch": 1.83488, - "grad_norm": 45.049034118652344, - "learning_rate": 1.8346666666666668e-06, - "loss": 1.5736, - "step": 57340 - }, - { - "epoch": 1.8352, - "grad_norm": 45.5625, - "learning_rate": 1.8311111111111113e-06, - "loss": 1.5981, - "step": 57350 - }, - { - "epoch": 1.83552, - "grad_norm": 46.143245697021484, - "learning_rate": 1.8275555555555557e-06, - "loss": 1.59, - "step": 57360 - }, - { - "epoch": 1.8358400000000001, - "grad_norm": 44.78925323486328, - "learning_rate": 1.8240000000000002e-06, - "loss": 1.5689, - "step": 57370 - }, - { - "epoch": 1.83616, - "grad_norm": 46.51466369628906, - "learning_rate": 1.8204444444444446e-06, - "loss": 1.5868, - "step": 57380 - }, - { - "epoch": 1.83648, - "grad_norm": 46.2537956237793, - "learning_rate": 1.8168888888888891e-06, - "loss": 1.5799, - "step": 57390 - }, - { - "epoch": 1.8368, - "grad_norm": 47.15256881713867, - "learning_rate": 1.8133333333333337e-06, - "loss": 1.5817, - "step": 57400 - }, - { - "epoch": 1.83712, - "grad_norm": 43.75941467285156, - "learning_rate": 1.8097777777777778e-06, - "loss": 1.5915, - "step": 57410 - }, - { - "epoch": 1.83744, - "grad_norm": 45.291229248046875, - "learning_rate": 1.8062222222222222e-06, - "loss": 1.584, - "step": 57420 - }, - { - "epoch": 1.8377599999999998, - "grad_norm": 46.50223159790039, - "learning_rate": 1.8026666666666667e-06, - "loss": 1.6023, - "step": 57430 - }, - { - "epoch": 1.8380800000000002, - "grad_norm": 45.744083404541016, - "learning_rate": 1.7991111111111113e-06, - "loss": 1.5789, - "step": 57440 - }, - { - "epoch": 1.8384, - "grad_norm": 46.86274337768555, - "learning_rate": 1.7955555555555556e-06, - "loss": 1.5859, - "step": 57450 - }, - { - "epoch": 1.83872, - "grad_norm": 44.3387336730957, - "learning_rate": 1.7920000000000002e-06, - "loss": 1.5576, - "step": 57460 - }, - { - "epoch": 1.83904, - "grad_norm": 48.54364013671875, - "learning_rate": 1.7884444444444445e-06, - "loss": 1.5805, - "step": 57470 - }, - { - "epoch": 1.83936, - "grad_norm": 45.73419952392578, - "learning_rate": 1.784888888888889e-06, - "loss": 1.5986, - "step": 57480 - }, - { - "epoch": 1.83968, - "grad_norm": 46.14574432373047, - "learning_rate": 1.7813333333333336e-06, - "loss": 1.5944, - "step": 57490 - }, - { - "epoch": 1.8399999999999999, - "grad_norm": 45.9505615234375, - "learning_rate": 1.777777777777778e-06, - "loss": 1.5767, - "step": 57500 - }, - { - "epoch": 1.84032, - "grad_norm": 47.18000411987305, - "learning_rate": 1.7742222222222225e-06, - "loss": 1.5771, - "step": 57510 - }, - { - "epoch": 1.84064, - "grad_norm": 47.036415100097656, - "learning_rate": 1.7706666666666669e-06, - "loss": 1.5874, - "step": 57520 - }, - { - "epoch": 1.84096, - "grad_norm": 45.88491439819336, - "learning_rate": 1.7671111111111114e-06, - "loss": 1.5872, - "step": 57530 - }, - { - "epoch": 1.84128, - "grad_norm": 45.427555084228516, - "learning_rate": 1.7635555555555555e-06, - "loss": 1.5916, - "step": 57540 - }, - { - "epoch": 1.8416000000000001, - "grad_norm": 47.513031005859375, - "learning_rate": 1.76e-06, - "loss": 1.58, - "step": 57550 - }, - { - "epoch": 1.84192, - "grad_norm": 46.50860595703125, - "learning_rate": 1.7564444444444444e-06, - "loss": 1.6057, - "step": 57560 - }, - { - "epoch": 1.8422399999999999, - "grad_norm": 44.6576042175293, - "learning_rate": 
1.752888888888889e-06, - "loss": 1.577, - "step": 57570 - }, - { - "epoch": 1.84256, - "grad_norm": 45.63453674316406, - "learning_rate": 1.7493333333333335e-06, - "loss": 1.6053, - "step": 57580 - }, - { - "epoch": 1.84288, - "grad_norm": 45.769283294677734, - "learning_rate": 1.7457777777777779e-06, - "loss": 1.5621, - "step": 57590 - }, - { - "epoch": 1.8432, - "grad_norm": 44.64745330810547, - "learning_rate": 1.7422222222222224e-06, - "loss": 1.595, - "step": 57600 - }, - { - "epoch": 1.84352, - "grad_norm": 46.16799545288086, - "learning_rate": 1.7386666666666668e-06, - "loss": 1.5653, - "step": 57610 - }, - { - "epoch": 1.8438400000000001, - "grad_norm": 46.11171340942383, - "learning_rate": 1.7351111111111113e-06, - "loss": 1.5946, - "step": 57620 - }, - { - "epoch": 1.84416, - "grad_norm": 46.63682174682617, - "learning_rate": 1.7315555555555557e-06, - "loss": 1.622, - "step": 57630 - }, - { - "epoch": 1.84448, - "grad_norm": 45.37915802001953, - "learning_rate": 1.7280000000000002e-06, - "loss": 1.5712, - "step": 57640 - }, - { - "epoch": 1.8448, - "grad_norm": 42.86368179321289, - "learning_rate": 1.7244444444444448e-06, - "loss": 1.5723, - "step": 57650 - }, - { - "epoch": 1.84512, - "grad_norm": 45.46616744995117, - "learning_rate": 1.7208888888888891e-06, - "loss": 1.5876, - "step": 57660 - }, - { - "epoch": 1.84544, - "grad_norm": 44.86207962036133, - "learning_rate": 1.7173333333333333e-06, - "loss": 1.5989, - "step": 57670 - }, - { - "epoch": 1.8457599999999998, - "grad_norm": 44.445980072021484, - "learning_rate": 1.7137777777777778e-06, - "loss": 1.589, - "step": 57680 - }, - { - "epoch": 1.8460800000000002, - "grad_norm": 46.44892501831055, - "learning_rate": 1.7102222222222224e-06, - "loss": 1.5794, - "step": 57690 - }, - { - "epoch": 1.8464, - "grad_norm": 45.75631332397461, - "learning_rate": 1.7066666666666667e-06, - "loss": 1.5837, - "step": 57700 - }, - { - "epoch": 1.84672, - "grad_norm": 45.64183044433594, - "learning_rate": 1.7031111111111113e-06, - "loss": 1.5808, - "step": 57710 - }, - { - "epoch": 1.84704, - "grad_norm": 45.0387077331543, - "learning_rate": 1.6995555555555556e-06, - "loss": 1.6077, - "step": 57720 - }, - { - "epoch": 1.8473600000000001, - "grad_norm": 48.08765411376953, - "learning_rate": 1.6960000000000002e-06, - "loss": 1.5649, - "step": 57730 - }, - { - "epoch": 1.84768, - "grad_norm": 47.8968505859375, - "learning_rate": 1.6924444444444447e-06, - "loss": 1.6016, - "step": 57740 - }, - { - "epoch": 1.8479999999999999, - "grad_norm": 43.858253479003906, - "learning_rate": 1.688888888888889e-06, - "loss": 1.5971, - "step": 57750 - }, - { - "epoch": 1.84832, - "grad_norm": 46.54716491699219, - "learning_rate": 1.6853333333333336e-06, - "loss": 1.6025, - "step": 57760 - }, - { - "epoch": 1.84864, - "grad_norm": 46.3195915222168, - "learning_rate": 1.681777777777778e-06, - "loss": 1.5747, - "step": 57770 - }, - { - "epoch": 1.84896, - "grad_norm": 46.96797561645508, - "learning_rate": 1.6782222222222225e-06, - "loss": 1.5735, - "step": 57780 - }, - { - "epoch": 1.84928, - "grad_norm": 46.94729995727539, - "learning_rate": 1.6746666666666668e-06, - "loss": 1.5827, - "step": 57790 - }, - { - "epoch": 1.8496000000000001, - "grad_norm": 45.07993698120117, - "learning_rate": 1.6711111111111112e-06, - "loss": 1.5997, - "step": 57800 - }, - { - "epoch": 1.84992, - "grad_norm": 44.66395950317383, - "learning_rate": 1.6675555555555555e-06, - "loss": 1.6015, - "step": 57810 - }, - { - "epoch": 1.8502399999999999, - "grad_norm": 45.70362091064453, - 
"learning_rate": 1.664e-06, - "loss": 1.5976, - "step": 57820 - }, - { - "epoch": 1.85056, - "grad_norm": 43.54787063598633, - "learning_rate": 1.6604444444444446e-06, - "loss": 1.5739, - "step": 57830 - }, - { - "epoch": 1.85088, - "grad_norm": 46.17134475708008, - "learning_rate": 1.656888888888889e-06, - "loss": 1.572, - "step": 57840 - }, - { - "epoch": 1.8512, - "grad_norm": 45.06362533569336, - "learning_rate": 1.6533333333333335e-06, - "loss": 1.5897, - "step": 57850 - }, - { - "epoch": 1.85152, - "grad_norm": 45.86150360107422, - "learning_rate": 1.6497777777777779e-06, - "loss": 1.5891, - "step": 57860 - }, - { - "epoch": 1.8518400000000002, - "grad_norm": 46.10201644897461, - "learning_rate": 1.6462222222222224e-06, - "loss": 1.5785, - "step": 57870 - }, - { - "epoch": 1.85216, - "grad_norm": 45.461830139160156, - "learning_rate": 1.6426666666666668e-06, - "loss": 1.5795, - "step": 57880 - }, - { - "epoch": 1.85248, - "grad_norm": 44.85392761230469, - "learning_rate": 1.6391111111111113e-06, - "loss": 1.619, - "step": 57890 - }, - { - "epoch": 1.8528, - "grad_norm": 49.077579498291016, - "learning_rate": 1.6355555555555559e-06, - "loss": 1.5659, - "step": 57900 - }, - { - "epoch": 1.85312, - "grad_norm": 47.31373596191406, - "learning_rate": 1.6320000000000002e-06, - "loss": 1.5953, - "step": 57910 - }, - { - "epoch": 1.85344, - "grad_norm": 45.67685317993164, - "learning_rate": 1.6284444444444448e-06, - "loss": 1.5822, - "step": 57920 - }, - { - "epoch": 1.8537599999999999, - "grad_norm": 45.24131393432617, - "learning_rate": 1.624888888888889e-06, - "loss": 1.5587, - "step": 57930 - }, - { - "epoch": 1.85408, - "grad_norm": 44.38886642456055, - "learning_rate": 1.6213333333333335e-06, - "loss": 1.5701, - "step": 57940 - }, - { - "epoch": 1.8544, - "grad_norm": 48.03721237182617, - "learning_rate": 1.6177777777777778e-06, - "loss": 1.5777, - "step": 57950 - }, - { - "epoch": 1.85472, - "grad_norm": 47.05076599121094, - "learning_rate": 1.6142222222222223e-06, - "loss": 1.569, - "step": 57960 - }, - { - "epoch": 1.85504, - "grad_norm": 45.041385650634766, - "learning_rate": 1.6106666666666667e-06, - "loss": 1.5948, - "step": 57970 - }, - { - "epoch": 1.8553600000000001, - "grad_norm": 46.04569625854492, - "learning_rate": 1.6071111111111112e-06, - "loss": 1.5724, - "step": 57980 - }, - { - "epoch": 1.85568, - "grad_norm": 46.36091995239258, - "learning_rate": 1.6035555555555558e-06, - "loss": 1.5818, - "step": 57990 - }, - { - "epoch": 1.8559999999999999, - "grad_norm": 45.36357116699219, - "learning_rate": 1.6000000000000001e-06, - "loss": 1.5672, - "step": 58000 - }, - { - "epoch": 1.85632, - "grad_norm": 45.477821350097656, - "learning_rate": 1.5964444444444447e-06, - "loss": 1.5666, - "step": 58010 - }, - { - "epoch": 1.85664, - "grad_norm": 47.501224517822266, - "learning_rate": 1.592888888888889e-06, - "loss": 1.5782, - "step": 58020 - }, - { - "epoch": 1.85696, - "grad_norm": 46.90428161621094, - "learning_rate": 1.5893333333333336e-06, - "loss": 1.5975, - "step": 58030 - }, - { - "epoch": 1.85728, - "grad_norm": 47.66033935546875, - "learning_rate": 1.585777777777778e-06, - "loss": 1.5813, - "step": 58040 - }, - { - "epoch": 1.8576000000000001, - "grad_norm": 48.22922134399414, - "learning_rate": 1.5822222222222223e-06, - "loss": 1.6164, - "step": 58050 - }, - { - "epoch": 1.85792, - "grad_norm": 46.36241149902344, - "learning_rate": 1.5786666666666666e-06, - "loss": 1.6169, - "step": 58060 - }, - { - "epoch": 1.85824, - "grad_norm": 46.98133850097656, - "learning_rate": 
1.5751111111111112e-06, - "loss": 1.5872, - "step": 58070 - }, - { - "epoch": 1.85856, - "grad_norm": 46.50725555419922, - "learning_rate": 1.5715555555555555e-06, - "loss": 1.5744, - "step": 58080 - }, - { - "epoch": 1.85888, - "grad_norm": 46.72871398925781, - "learning_rate": 1.568e-06, - "loss": 1.5727, - "step": 58090 - }, - { - "epoch": 1.8592, - "grad_norm": 44.92698287963867, - "learning_rate": 1.5644444444444446e-06, - "loss": 1.5775, - "step": 58100 - }, - { - "epoch": 1.8595199999999998, - "grad_norm": 44.662654876708984, - "learning_rate": 1.560888888888889e-06, - "loss": 1.5792, - "step": 58110 - }, - { - "epoch": 1.8598400000000002, - "grad_norm": 46.40205764770508, - "learning_rate": 1.5573333333333335e-06, - "loss": 1.5961, - "step": 58120 - }, - { - "epoch": 1.86016, - "grad_norm": 46.643638610839844, - "learning_rate": 1.5537777777777779e-06, - "loss": 1.5731, - "step": 58130 - }, - { - "epoch": 1.86048, - "grad_norm": 45.57893753051758, - "learning_rate": 1.5502222222222224e-06, - "loss": 1.5809, - "step": 58140 - }, - { - "epoch": 1.8608, - "grad_norm": 47.35958480834961, - "learning_rate": 1.546666666666667e-06, - "loss": 1.6098, - "step": 58150 - }, - { - "epoch": 1.86112, - "grad_norm": 46.90447235107422, - "learning_rate": 1.5431111111111113e-06, - "loss": 1.5867, - "step": 58160 - }, - { - "epoch": 1.86144, - "grad_norm": 46.82596206665039, - "learning_rate": 1.5395555555555559e-06, - "loss": 1.595, - "step": 58170 - }, - { - "epoch": 1.8617599999999999, - "grad_norm": 45.697059631347656, - "learning_rate": 1.536e-06, - "loss": 1.6127, - "step": 58180 - }, - { - "epoch": 1.86208, - "grad_norm": 44.96653747558594, - "learning_rate": 1.5324444444444445e-06, - "loss": 1.5948, - "step": 58190 - }, - { - "epoch": 1.8624, - "grad_norm": 46.40520095825195, - "learning_rate": 1.5288888888888889e-06, - "loss": 1.5979, - "step": 58200 - }, - { - "epoch": 1.86272, - "grad_norm": 46.27067184448242, - "learning_rate": 1.5253333333333334e-06, - "loss": 1.5443, - "step": 58210 - }, - { - "epoch": 1.86304, - "grad_norm": 46.467071533203125, - "learning_rate": 1.5217777777777778e-06, - "loss": 1.6093, - "step": 58220 - }, - { - "epoch": 1.8633600000000001, - "grad_norm": 47.1767463684082, - "learning_rate": 1.5182222222222223e-06, - "loss": 1.588, - "step": 58230 - }, - { - "epoch": 1.86368, - "grad_norm": 46.3292121887207, - "learning_rate": 1.5146666666666669e-06, - "loss": 1.5683, - "step": 58240 - }, - { - "epoch": 1.8639999999999999, - "grad_norm": 45.56246566772461, - "learning_rate": 1.5111111111111112e-06, - "loss": 1.5762, - "step": 58250 - }, - { - "epoch": 1.86432, - "grad_norm": 45.426937103271484, - "learning_rate": 1.5075555555555558e-06, - "loss": 1.5917, - "step": 58260 - }, - { - "epoch": 1.86464, - "grad_norm": 48.11009979248047, - "learning_rate": 1.5040000000000001e-06, - "loss": 1.5851, - "step": 58270 - }, - { - "epoch": 1.86496, - "grad_norm": 45.19404220581055, - "learning_rate": 1.5004444444444447e-06, - "loss": 1.5762, - "step": 58280 - }, - { - "epoch": 1.86528, - "grad_norm": 45.58405303955078, - "learning_rate": 1.496888888888889e-06, - "loss": 1.5829, - "step": 58290 - }, - { - "epoch": 1.8656000000000001, - "grad_norm": 46.4255485534668, - "learning_rate": 1.4933333333333336e-06, - "loss": 1.5929, - "step": 58300 - }, - { - "epoch": 1.86592, - "grad_norm": 49.003501892089844, - "learning_rate": 1.4897777777777777e-06, - "loss": 1.5872, - "step": 58310 - }, - { - "epoch": 1.86624, - "grad_norm": 45.09196853637695, - "learning_rate": 
1.4862222222222223e-06, - "loss": 1.5773, - "step": 58320 - }, - { - "epoch": 1.86656, - "grad_norm": 45.00556945800781, - "learning_rate": 1.4826666666666666e-06, - "loss": 1.5949, - "step": 58330 - }, - { - "epoch": 1.86688, - "grad_norm": 47.26096725463867, - "learning_rate": 1.4791111111111112e-06, - "loss": 1.6113, - "step": 58340 - }, - { - "epoch": 1.8672, - "grad_norm": 45.97952651977539, - "learning_rate": 1.4755555555555557e-06, - "loss": 1.572, - "step": 58350 - }, - { - "epoch": 1.8675199999999998, - "grad_norm": 44.738224029541016, - "learning_rate": 1.472e-06, - "loss": 1.5745, - "step": 58360 - }, - { - "epoch": 1.86784, - "grad_norm": 43.97645950317383, - "learning_rate": 1.4684444444444446e-06, - "loss": 1.6096, - "step": 58370 - }, - { - "epoch": 1.86816, - "grad_norm": 47.645835876464844, - "learning_rate": 1.464888888888889e-06, - "loss": 1.5828, - "step": 58380 - }, - { - "epoch": 1.86848, - "grad_norm": 46.627506256103516, - "learning_rate": 1.4613333333333335e-06, - "loss": 1.5731, - "step": 58390 - }, - { - "epoch": 1.8688, - "grad_norm": 48.27983474731445, - "learning_rate": 1.457777777777778e-06, - "loss": 1.5859, - "step": 58400 - }, - { - "epoch": 1.8691200000000001, - "grad_norm": 45.809268951416016, - "learning_rate": 1.4542222222222224e-06, - "loss": 1.5843, - "step": 58410 - }, - { - "epoch": 1.86944, - "grad_norm": 46.93641662597656, - "learning_rate": 1.450666666666667e-06, - "loss": 1.5581, - "step": 58420 - }, - { - "epoch": 1.8697599999999999, - "grad_norm": 45.70205307006836, - "learning_rate": 1.4471111111111113e-06, - "loss": 1.5773, - "step": 58430 - }, - { - "epoch": 1.87008, - "grad_norm": 47.058006286621094, - "learning_rate": 1.4435555555555556e-06, - "loss": 1.5623, - "step": 58440 - }, - { - "epoch": 1.8704, - "grad_norm": 43.49956512451172, - "learning_rate": 1.44e-06, - "loss": 1.5601, - "step": 58450 - }, - { - "epoch": 1.87072, - "grad_norm": 47.000003814697266, - "learning_rate": 1.4364444444444445e-06, - "loss": 1.565, - "step": 58460 - }, - { - "epoch": 1.87104, - "grad_norm": 43.984580993652344, - "learning_rate": 1.4328888888888889e-06, - "loss": 1.5795, - "step": 58470 - }, - { - "epoch": 1.8713600000000001, - "grad_norm": 44.211822509765625, - "learning_rate": 1.4293333333333334e-06, - "loss": 1.5957, - "step": 58480 - }, - { - "epoch": 1.87168, - "grad_norm": 47.10588073730469, - "learning_rate": 1.4257777777777778e-06, - "loss": 1.5852, - "step": 58490 - }, - { - "epoch": 1.8719999999999999, - "grad_norm": 45.84103775024414, - "learning_rate": 1.4222222222222223e-06, - "loss": 1.5888, - "step": 58500 - }, - { - "epoch": 1.87232, - "grad_norm": 47.888851165771484, - "learning_rate": 1.4186666666666669e-06, - "loss": 1.6286, - "step": 58510 - }, - { - "epoch": 1.87264, - "grad_norm": 46.03474807739258, - "learning_rate": 1.4151111111111112e-06, - "loss": 1.6226, - "step": 58520 - }, - { - "epoch": 1.87296, - "grad_norm": 45.63921356201172, - "learning_rate": 1.4115555555555558e-06, - "loss": 1.5904, - "step": 58530 - }, - { - "epoch": 1.8732799999999998, - "grad_norm": 46.81883239746094, - "learning_rate": 1.4080000000000001e-06, - "loss": 1.5945, - "step": 58540 - }, - { - "epoch": 1.8736000000000002, - "grad_norm": 45.993690490722656, - "learning_rate": 1.4044444444444447e-06, - "loss": 1.5804, - "step": 58550 - }, - { - "epoch": 1.87392, - "grad_norm": 47.72322082519531, - "learning_rate": 1.4008888888888892e-06, - "loss": 1.5495, - "step": 58560 - }, - { - "epoch": 1.87424, - "grad_norm": 46.83625411987305, - "learning_rate": 
1.3973333333333334e-06, - "loss": 1.6094, - "step": 58570 - }, - { - "epoch": 1.87456, - "grad_norm": 46.28458786010742, - "learning_rate": 1.3937777777777777e-06, - "loss": 1.6091, - "step": 58580 - }, - { - "epoch": 1.87488, - "grad_norm": 45.972381591796875, - "learning_rate": 1.3902222222222222e-06, - "loss": 1.6046, - "step": 58590 - }, - { - "epoch": 1.8752, - "grad_norm": 44.876861572265625, - "learning_rate": 1.3866666666666668e-06, - "loss": 1.5909, - "step": 58600 - }, - { - "epoch": 1.8755199999999999, - "grad_norm": 45.53847122192383, - "learning_rate": 1.3831111111111111e-06, - "loss": 1.5822, - "step": 58610 - }, - { - "epoch": 1.87584, - "grad_norm": 45.47560501098633, - "learning_rate": 1.3795555555555557e-06, - "loss": 1.5897, - "step": 58620 - }, - { - "epoch": 1.87616, - "grad_norm": 46.871673583984375, - "learning_rate": 1.376e-06, - "loss": 1.6033, - "step": 58630 - }, - { - "epoch": 1.87648, - "grad_norm": 45.24031448364258, - "learning_rate": 1.3724444444444446e-06, - "loss": 1.6099, - "step": 58640 - }, - { - "epoch": 1.8768, - "grad_norm": 44.30140686035156, - "learning_rate": 1.3688888888888891e-06, - "loss": 1.6058, - "step": 58650 - }, - { - "epoch": 1.8771200000000001, - "grad_norm": 47.0979118347168, - "learning_rate": 1.3653333333333335e-06, - "loss": 1.6106, - "step": 58660 - }, - { - "epoch": 1.87744, - "grad_norm": 50.710105895996094, - "learning_rate": 1.361777777777778e-06, - "loss": 1.5798, - "step": 58670 - }, - { - "epoch": 1.8777599999999999, - "grad_norm": 47.16532897949219, - "learning_rate": 1.3582222222222224e-06, - "loss": 1.5975, - "step": 58680 - }, - { - "epoch": 1.87808, - "grad_norm": 47.154144287109375, - "learning_rate": 1.354666666666667e-06, - "loss": 1.6065, - "step": 58690 - }, - { - "epoch": 1.8784, - "grad_norm": 44.24601745605469, - "learning_rate": 1.351111111111111e-06, - "loss": 1.5771, - "step": 58700 - }, - { - "epoch": 1.87872, - "grad_norm": 45.991661071777344, - "learning_rate": 1.3475555555555556e-06, - "loss": 1.5962, - "step": 58710 - }, - { - "epoch": 1.87904, - "grad_norm": 45.59969711303711, - "learning_rate": 1.344e-06, - "loss": 1.6073, - "step": 58720 - }, - { - "epoch": 1.8793600000000001, - "grad_norm": 46.94809341430664, - "learning_rate": 1.3404444444444445e-06, - "loss": 1.6476, - "step": 58730 - }, - { - "epoch": 1.87968, - "grad_norm": 45.00490951538086, - "learning_rate": 1.3368888888888889e-06, - "loss": 1.5637, - "step": 58740 - }, - { - "epoch": 1.88, - "grad_norm": 46.691856384277344, - "learning_rate": 1.3333333333333334e-06, - "loss": 1.5814, - "step": 58750 - }, - { - "epoch": 1.88032, - "grad_norm": 45.54383850097656, - "learning_rate": 1.329777777777778e-06, - "loss": 1.5851, - "step": 58760 - }, - { - "epoch": 1.88064, - "grad_norm": 47.79774856567383, - "learning_rate": 1.3262222222222223e-06, - "loss": 1.582, - "step": 58770 - }, - { - "epoch": 1.88096, - "grad_norm": 44.72974395751953, - "learning_rate": 1.3226666666666669e-06, - "loss": 1.5903, - "step": 58780 - }, - { - "epoch": 1.8812799999999998, - "grad_norm": 46.53852081298828, - "learning_rate": 1.3191111111111112e-06, - "loss": 1.5552, - "step": 58790 - }, - { - "epoch": 1.8816000000000002, - "grad_norm": 47.67750930786133, - "learning_rate": 1.3155555555555558e-06, - "loss": 1.5877, - "step": 58800 - }, - { - "epoch": 1.88192, - "grad_norm": 46.062530517578125, - "learning_rate": 1.3120000000000003e-06, - "loss": 1.6022, - "step": 58810 - }, - { - "epoch": 1.88224, - "grad_norm": 44.168548583984375, - "learning_rate": 
1.3084444444444447e-06, - "loss": 1.5655, - "step": 58820 - }, - { - "epoch": 1.88256, - "grad_norm": 47.523563385009766, - "learning_rate": 1.3048888888888888e-06, - "loss": 1.6147, - "step": 58830 - }, - { - "epoch": 1.88288, - "grad_norm": 47.97063446044922, - "learning_rate": 1.3013333333333333e-06, - "loss": 1.5899, - "step": 58840 - }, - { - "epoch": 1.8832, - "grad_norm": 45.672088623046875, - "learning_rate": 1.2977777777777779e-06, - "loss": 1.5966, - "step": 58850 - }, - { - "epoch": 1.8835199999999999, - "grad_norm": 47.125675201416016, - "learning_rate": 1.2942222222222222e-06, - "loss": 1.5992, - "step": 58860 - }, - { - "epoch": 1.88384, - "grad_norm": 46.30353927612305, - "learning_rate": 1.2906666666666668e-06, - "loss": 1.5933, - "step": 58870 - }, - { - "epoch": 1.88416, - "grad_norm": 45.49856185913086, - "learning_rate": 1.2871111111111111e-06, - "loss": 1.5756, - "step": 58880 - }, - { - "epoch": 1.88448, - "grad_norm": 44.48989486694336, - "learning_rate": 1.2835555555555557e-06, - "loss": 1.5633, - "step": 58890 - }, - { - "epoch": 1.8848, - "grad_norm": 45.93333435058594, - "learning_rate": 1.28e-06, - "loss": 1.5816, - "step": 58900 - }, - { - "epoch": 1.8851200000000001, - "grad_norm": 45.116729736328125, - "learning_rate": 1.2764444444444446e-06, - "loss": 1.6162, - "step": 58910 - }, - { - "epoch": 1.88544, - "grad_norm": 44.85403823852539, - "learning_rate": 1.2728888888888891e-06, - "loss": 1.5737, - "step": 58920 - }, - { - "epoch": 1.8857599999999999, - "grad_norm": 45.997314453125, - "learning_rate": 1.2693333333333335e-06, - "loss": 1.5953, - "step": 58930 - }, - { - "epoch": 1.88608, - "grad_norm": 45.64460372924805, - "learning_rate": 1.265777777777778e-06, - "loss": 1.593, - "step": 58940 - }, - { - "epoch": 1.8864, - "grad_norm": 51.03400421142578, - "learning_rate": 1.2622222222222224e-06, - "loss": 1.5836, - "step": 58950 - }, - { - "epoch": 1.88672, - "grad_norm": 45.448829650878906, - "learning_rate": 1.2586666666666667e-06, - "loss": 1.5865, - "step": 58960 - }, - { - "epoch": 1.88704, - "grad_norm": 43.558738708496094, - "learning_rate": 1.255111111111111e-06, - "loss": 1.5797, - "step": 58970 - }, - { - "epoch": 1.8873600000000001, - "grad_norm": 46.4416389465332, - "learning_rate": 1.2515555555555556e-06, - "loss": 1.6047, - "step": 58980 - }, - { - "epoch": 1.88768, - "grad_norm": 43.49354553222656, - "learning_rate": 1.248e-06, - "loss": 1.5868, - "step": 58990 - }, - { - "epoch": 1.888, - "grad_norm": 46.484886169433594, - "learning_rate": 1.2444444444444445e-06, - "loss": 1.6069, - "step": 59000 - }, - { - "epoch": 1.88832, - "grad_norm": 46.54124069213867, - "learning_rate": 1.240888888888889e-06, - "loss": 1.6042, - "step": 59010 - }, - { - "epoch": 1.88864, - "grad_norm": 44.74590301513672, - "learning_rate": 1.2373333333333334e-06, - "loss": 1.6299, - "step": 59020 - }, - { - "epoch": 1.88896, - "grad_norm": 47.56993865966797, - "learning_rate": 1.233777777777778e-06, - "loss": 1.6019, - "step": 59030 - }, - { - "epoch": 1.8892799999999998, - "grad_norm": 47.559814453125, - "learning_rate": 1.2302222222222223e-06, - "loss": 1.5463, - "step": 59040 - }, - { - "epoch": 1.8896, - "grad_norm": 46.389854431152344, - "learning_rate": 1.2266666666666666e-06, - "loss": 1.5817, - "step": 59050 - }, - { - "epoch": 1.88992, - "grad_norm": 46.8560791015625, - "learning_rate": 1.2231111111111112e-06, - "loss": 1.5512, - "step": 59060 - }, - { - "epoch": 1.89024, - "grad_norm": 48.501808166503906, - "learning_rate": 1.2195555555555555e-06, - "loss": 
1.6029, - "step": 59070 - }, - { - "epoch": 1.89056, - "grad_norm": 44.118343353271484, - "learning_rate": 1.216e-06, - "loss": 1.576, - "step": 59080 - }, - { - "epoch": 1.8908800000000001, - "grad_norm": 45.0555305480957, - "learning_rate": 1.2124444444444446e-06, - "loss": 1.5718, - "step": 59090 - }, - { - "epoch": 1.8912, - "grad_norm": 46.08884048461914, - "learning_rate": 1.208888888888889e-06, - "loss": 1.5912, - "step": 59100 - }, - { - "epoch": 1.8915199999999999, - "grad_norm": 45.68895721435547, - "learning_rate": 1.2053333333333335e-06, - "loss": 1.5773, - "step": 59110 - }, - { - "epoch": 1.89184, - "grad_norm": 47.57466506958008, - "learning_rate": 1.2017777777777779e-06, - "loss": 1.5952, - "step": 59120 - }, - { - "epoch": 1.89216, - "grad_norm": 46.38992691040039, - "learning_rate": 1.1982222222222222e-06, - "loss": 1.5856, - "step": 59130 - }, - { - "epoch": 1.89248, - "grad_norm": 45.40290069580078, - "learning_rate": 1.1946666666666668e-06, - "loss": 1.5807, - "step": 59140 - }, - { - "epoch": 1.8928, - "grad_norm": 44.96638107299805, - "learning_rate": 1.1911111111111111e-06, - "loss": 1.567, - "step": 59150 - }, - { - "epoch": 1.8931200000000001, - "grad_norm": 45.714420318603516, - "learning_rate": 1.1875555555555557e-06, - "loss": 1.5797, - "step": 59160 - }, - { - "epoch": 1.89344, - "grad_norm": 44.78868865966797, - "learning_rate": 1.1840000000000002e-06, - "loss": 1.5492, - "step": 59170 - }, - { - "epoch": 1.8937599999999999, - "grad_norm": 48.12905502319336, - "learning_rate": 1.1804444444444446e-06, - "loss": 1.568, - "step": 59180 - }, - { - "epoch": 1.89408, - "grad_norm": 46.8428955078125, - "learning_rate": 1.176888888888889e-06, - "loss": 1.5864, - "step": 59190 - }, - { - "epoch": 1.8944, - "grad_norm": 47.230228424072266, - "learning_rate": 1.1733333333333335e-06, - "loss": 1.5794, - "step": 59200 - }, - { - "epoch": 1.89472, - "grad_norm": 46.057979583740234, - "learning_rate": 1.1697777777777778e-06, - "loss": 1.5976, - "step": 59210 - }, - { - "epoch": 1.8950399999999998, - "grad_norm": 49.09218215942383, - "learning_rate": 1.1662222222222224e-06, - "loss": 1.6047, - "step": 59220 - }, - { - "epoch": 1.8953600000000002, - "grad_norm": 45.197288513183594, - "learning_rate": 1.1626666666666667e-06, - "loss": 1.5651, - "step": 59230 - }, - { - "epoch": 1.89568, - "grad_norm": 46.362613677978516, - "learning_rate": 1.1591111111111113e-06, - "loss": 1.6009, - "step": 59240 - }, - { - "epoch": 1.896, - "grad_norm": 45.984867095947266, - "learning_rate": 1.1555555555555556e-06, - "loss": 1.5618, - "step": 59250 - }, - { - "epoch": 1.89632, - "grad_norm": 46.774959564208984, - "learning_rate": 1.1520000000000002e-06, - "loss": 1.5684, - "step": 59260 - }, - { - "epoch": 1.89664, - "grad_norm": 45.798126220703125, - "learning_rate": 1.1484444444444445e-06, - "loss": 1.5963, - "step": 59270 - }, - { - "epoch": 1.89696, - "grad_norm": 46.795806884765625, - "learning_rate": 1.144888888888889e-06, - "loss": 1.564, - "step": 59280 - }, - { - "epoch": 1.8972799999999999, - "grad_norm": 46.9173469543457, - "learning_rate": 1.1413333333333334e-06, - "loss": 1.5965, - "step": 59290 - }, - { - "epoch": 1.8976, - "grad_norm": 45.2878532409668, - "learning_rate": 1.137777777777778e-06, - "loss": 1.5755, - "step": 59300 - }, - { - "epoch": 1.89792, - "grad_norm": 46.399452209472656, - "learning_rate": 1.1342222222222223e-06, - "loss": 1.5575, - "step": 59310 - }, - { - "epoch": 1.89824, - "grad_norm": 45.01802062988281, - "learning_rate": 1.1306666666666666e-06, - 
"loss": 1.5821, - "step": 59320 - }, - { - "epoch": 1.89856, - "grad_norm": 46.38847351074219, - "learning_rate": 1.1271111111111112e-06, - "loss": 1.587, - "step": 59330 - }, - { - "epoch": 1.8988800000000001, - "grad_norm": 47.17742156982422, - "learning_rate": 1.1235555555555557e-06, - "loss": 1.6044, - "step": 59340 - }, - { - "epoch": 1.8992, - "grad_norm": 47.04226303100586, - "learning_rate": 1.12e-06, - "loss": 1.5855, - "step": 59350 - }, - { - "epoch": 1.8995199999999999, - "grad_norm": 46.88846206665039, - "learning_rate": 1.1164444444444446e-06, - "loss": 1.5676, - "step": 59360 - }, - { - "epoch": 1.89984, - "grad_norm": 44.450767517089844, - "learning_rate": 1.112888888888889e-06, - "loss": 1.6056, - "step": 59370 - }, - { - "epoch": 1.90016, - "grad_norm": 43.49285888671875, - "learning_rate": 1.1093333333333333e-06, - "loss": 1.5647, - "step": 59380 - }, - { - "epoch": 1.90048, - "grad_norm": 55.5472526550293, - "learning_rate": 1.1057777777777779e-06, - "loss": 1.5936, - "step": 59390 - }, - { - "epoch": 1.9008, - "grad_norm": 44.852840423583984, - "learning_rate": 1.1022222222222222e-06, - "loss": 1.5658, - "step": 59400 - }, - { - "epoch": 1.9011200000000001, - "grad_norm": 45.111331939697266, - "learning_rate": 1.0986666666666668e-06, - "loss": 1.6017, - "step": 59410 - }, - { - "epoch": 1.90144, - "grad_norm": 45.814456939697266, - "learning_rate": 1.0951111111111113e-06, - "loss": 1.5871, - "step": 59420 - }, - { - "epoch": 1.90176, - "grad_norm": 46.770057678222656, - "learning_rate": 1.0915555555555557e-06, - "loss": 1.5695, - "step": 59430 - }, - { - "epoch": 1.90208, - "grad_norm": 45.28121566772461, - "learning_rate": 1.088e-06, - "loss": 1.59, - "step": 59440 - }, - { - "epoch": 1.9024, - "grad_norm": 46.66707229614258, - "learning_rate": 1.0844444444444446e-06, - "loss": 1.588, - "step": 59450 - }, - { - "epoch": 1.90272, - "grad_norm": 45.3797721862793, - "learning_rate": 1.080888888888889e-06, - "loss": 1.5848, - "step": 59460 - }, - { - "epoch": 1.9030399999999998, - "grad_norm": 45.462615966796875, - "learning_rate": 1.0773333333333335e-06, - "loss": 1.5868, - "step": 59470 - }, - { - "epoch": 1.9033600000000002, - "grad_norm": 46.903961181640625, - "learning_rate": 1.0737777777777778e-06, - "loss": 1.5728, - "step": 59480 - }, - { - "epoch": 1.90368, - "grad_norm": 45.53373336791992, - "learning_rate": 1.0702222222222223e-06, - "loss": 1.5891, - "step": 59490 - }, - { - "epoch": 1.904, - "grad_norm": 46.55686950683594, - "learning_rate": 1.066666666666667e-06, - "loss": 1.6067, - "step": 59500 - }, - { - "epoch": 1.90432, - "grad_norm": 43.63496780395508, - "learning_rate": 1.0631111111111112e-06, - "loss": 1.5753, - "step": 59510 - }, - { - "epoch": 1.90464, - "grad_norm": 45.74709701538086, - "learning_rate": 1.0595555555555556e-06, - "loss": 1.5759, - "step": 59520 - }, - { - "epoch": 1.90496, - "grad_norm": 46.07301330566406, - "learning_rate": 1.0560000000000001e-06, - "loss": 1.5863, - "step": 59530 - }, - { - "epoch": 1.9052799999999999, - "grad_norm": 46.41236877441406, - "learning_rate": 1.0524444444444445e-06, - "loss": 1.61, - "step": 59540 - }, - { - "epoch": 1.9056, - "grad_norm": 47.11589813232422, - "learning_rate": 1.048888888888889e-06, - "loss": 1.579, - "step": 59550 - }, - { - "epoch": 1.90592, - "grad_norm": 46.7946662902832, - "learning_rate": 1.0453333333333334e-06, - "loss": 1.5736, - "step": 59560 - }, - { - "epoch": 1.90624, - "grad_norm": 46.487003326416016, - "learning_rate": 1.0417777777777777e-06, - "loss": 1.6078, - "step": 
59570 - }, - { - "epoch": 1.90656, - "grad_norm": 45.817604064941406, - "learning_rate": 1.0382222222222223e-06, - "loss": 1.6001, - "step": 59580 - }, - { - "epoch": 1.9068800000000001, - "grad_norm": 46.2359733581543, - "learning_rate": 1.0346666666666668e-06, - "loss": 1.5903, - "step": 59590 - }, - { - "epoch": 1.9072, - "grad_norm": 46.53152084350586, - "learning_rate": 1.0311111111111112e-06, - "loss": 1.5816, - "step": 59600 - }, - { - "epoch": 1.9075199999999999, - "grad_norm": 45.750003814697266, - "learning_rate": 1.0275555555555557e-06, - "loss": 1.5715, - "step": 59610 - }, - { - "epoch": 1.90784, - "grad_norm": 45.677181243896484, - "learning_rate": 1.024e-06, - "loss": 1.5714, - "step": 59620 - }, - { - "epoch": 1.90816, - "grad_norm": 45.89830780029297, - "learning_rate": 1.0204444444444444e-06, - "loss": 1.5841, - "step": 59630 - }, - { - "epoch": 1.90848, - "grad_norm": 44.421539306640625, - "learning_rate": 1.016888888888889e-06, - "loss": 1.5739, - "step": 59640 - }, - { - "epoch": 1.9088, - "grad_norm": 44.68858337402344, - "learning_rate": 1.0133333333333333e-06, - "loss": 1.5422, - "step": 59650 - }, - { - "epoch": 1.9091200000000002, - "grad_norm": 45.24201202392578, - "learning_rate": 1.0097777777777779e-06, - "loss": 1.5637, - "step": 59660 - }, - { - "epoch": 1.90944, - "grad_norm": 45.254791259765625, - "learning_rate": 1.0062222222222224e-06, - "loss": 1.5583, - "step": 59670 - }, - { - "epoch": 1.90976, - "grad_norm": 46.583251953125, - "learning_rate": 1.0026666666666668e-06, - "loss": 1.5624, - "step": 59680 - }, - { - "epoch": 1.91008, - "grad_norm": 47.10293960571289, - "learning_rate": 9.991111111111113e-07, - "loss": 1.6102, - "step": 59690 - }, - { - "epoch": 1.9104, - "grad_norm": 47.41055679321289, - "learning_rate": 9.955555555555556e-07, - "loss": 1.5775, - "step": 59700 - }, - { - "epoch": 1.91072, - "grad_norm": 47.239410400390625, - "learning_rate": 9.92e-07, - "loss": 1.6053, - "step": 59710 - }, - { - "epoch": 1.9110399999999998, - "grad_norm": 45.12579345703125, - "learning_rate": 9.884444444444445e-07, - "loss": 1.5884, - "step": 59720 - }, - { - "epoch": 1.91136, - "grad_norm": 43.950286865234375, - "learning_rate": 9.848888888888889e-07, - "loss": 1.5695, - "step": 59730 - }, - { - "epoch": 1.91168, - "grad_norm": 45.75297927856445, - "learning_rate": 9.813333333333334e-07, - "loss": 1.5729, - "step": 59740 - }, - { - "epoch": 1.912, - "grad_norm": 46.05004119873047, - "learning_rate": 9.77777777777778e-07, - "loss": 1.5979, - "step": 59750 - }, - { - "epoch": 1.91232, - "grad_norm": 45.577293395996094, - "learning_rate": 9.742222222222223e-07, - "loss": 1.5887, - "step": 59760 - }, - { - "epoch": 1.9126400000000001, - "grad_norm": 47.2728157043457, - "learning_rate": 9.706666666666667e-07, - "loss": 1.5899, - "step": 59770 - }, - { - "epoch": 1.91296, - "grad_norm": 46.47046661376953, - "learning_rate": 9.671111111111112e-07, - "loss": 1.5795, - "step": 59780 - }, - { - "epoch": 1.9132799999999999, - "grad_norm": 47.05642318725586, - "learning_rate": 9.635555555555556e-07, - "loss": 1.5726, - "step": 59790 - }, - { - "epoch": 1.9136, - "grad_norm": 55.471946716308594, - "learning_rate": 9.600000000000001e-07, - "loss": 1.5624, - "step": 59800 - }, - { - "epoch": 1.91392, - "grad_norm": 46.46736145019531, - "learning_rate": 9.564444444444445e-07, - "loss": 1.567, - "step": 59810 - }, - { - "epoch": 1.91424, - "grad_norm": 45.56180953979492, - "learning_rate": 9.52888888888889e-07, - "loss": 1.604, - "step": 59820 - }, - { - "epoch": 1.91456, 
- "grad_norm": 46.7037353515625, - "learning_rate": 9.493333333333334e-07, - "loss": 1.5803, - "step": 59830 - }, - { - "epoch": 1.9148800000000001, - "grad_norm": 43.98044204711914, - "learning_rate": 9.457777777777778e-07, - "loss": 1.5684, - "step": 59840 - }, - { - "epoch": 1.9152, - "grad_norm": 46.93412780761719, - "learning_rate": 9.422222222222223e-07, - "loss": 1.5731, - "step": 59850 - }, - { - "epoch": 1.91552, - "grad_norm": 48.32170486450195, - "learning_rate": 9.386666666666667e-07, - "loss": 1.6005, - "step": 59860 - }, - { - "epoch": 1.91584, - "grad_norm": 48.01280975341797, - "learning_rate": 9.351111111111113e-07, - "loss": 1.565, - "step": 59870 - }, - { - "epoch": 1.91616, - "grad_norm": 45.45317077636719, - "learning_rate": 9.315555555555557e-07, - "loss": 1.5921, - "step": 59880 - }, - { - "epoch": 1.91648, - "grad_norm": 45.727210998535156, - "learning_rate": 9.28e-07, - "loss": 1.5935, - "step": 59890 - }, - { - "epoch": 1.9167999999999998, - "grad_norm": 46.390628814697266, - "learning_rate": 9.244444444444445e-07, - "loss": 1.5885, - "step": 59900 - }, - { - "epoch": 1.9171200000000002, - "grad_norm": 47.422386169433594, - "learning_rate": 9.208888888888889e-07, - "loss": 1.5562, - "step": 59910 - }, - { - "epoch": 1.91744, - "grad_norm": 44.31741714477539, - "learning_rate": 9.173333333333334e-07, - "loss": 1.5926, - "step": 59920 - }, - { - "epoch": 1.91776, - "grad_norm": 48.28932571411133, - "learning_rate": 9.137777777777778e-07, - "loss": 1.5839, - "step": 59930 - }, - { - "epoch": 1.91808, - "grad_norm": 46.55033493041992, - "learning_rate": 9.102222222222223e-07, - "loss": 1.5974, - "step": 59940 - }, - { - "epoch": 1.9184, - "grad_norm": 48.564693450927734, - "learning_rate": 9.066666666666668e-07, - "loss": 1.5815, - "step": 59950 - }, - { - "epoch": 1.91872, - "grad_norm": 47.17411804199219, - "learning_rate": 9.031111111111111e-07, - "loss": 1.5669, - "step": 59960 - }, - { - "epoch": 1.9190399999999999, - "grad_norm": 45.52403259277344, - "learning_rate": 8.995555555555556e-07, - "loss": 1.5781, - "step": 59970 - }, - { - "epoch": 1.91936, - "grad_norm": 47.34508514404297, - "learning_rate": 8.960000000000001e-07, - "loss": 1.5662, - "step": 59980 - }, - { - "epoch": 1.91968, - "grad_norm": 45.80388641357422, - "learning_rate": 8.924444444444445e-07, - "loss": 1.561, - "step": 59990 - }, - { - "epoch": 1.92, - "grad_norm": 43.46025848388672, - "learning_rate": 8.88888888888889e-07, - "loss": 1.5916, - "step": 60000 - }, - { - "epoch": 1.92032, - "grad_norm": 47.30317306518555, - "learning_rate": 8.853333333333334e-07, - "loss": 1.5744, - "step": 60010 - }, - { - "epoch": 1.9206400000000001, - "grad_norm": 47.16181945800781, - "learning_rate": 8.817777777777778e-07, - "loss": 1.6053, - "step": 60020 - }, - { - "epoch": 1.92096, - "grad_norm": 46.15432357788086, - "learning_rate": 8.782222222222222e-07, - "loss": 1.5566, - "step": 60030 - }, - { - "epoch": 1.9212799999999999, - "grad_norm": 46.11896896362305, - "learning_rate": 8.746666666666668e-07, - "loss": 1.589, - "step": 60040 - }, - { - "epoch": 1.9216, - "grad_norm": 46.01621627807617, - "learning_rate": 8.711111111111112e-07, - "loss": 1.5917, - "step": 60050 - }, - { - "epoch": 1.92192, - "grad_norm": 46.327884674072266, - "learning_rate": 8.675555555555557e-07, - "loss": 1.6255, - "step": 60060 - }, - { - "epoch": 1.92224, - "grad_norm": 48.15599060058594, - "learning_rate": 8.640000000000001e-07, - "loss": 1.6075, - "step": 60070 - }, - { - "epoch": 1.92256, - "grad_norm": 
45.618263244628906, - "learning_rate": 8.604444444444446e-07, - "loss": 1.5768, - "step": 60080 - }, - { - "epoch": 1.9228800000000001, - "grad_norm": 49.41157531738281, - "learning_rate": 8.568888888888889e-07, - "loss": 1.5467, - "step": 60090 - }, - { - "epoch": 1.9232, - "grad_norm": 46.044471740722656, - "learning_rate": 8.533333333333334e-07, - "loss": 1.5702, - "step": 60100 - }, - { - "epoch": 1.92352, - "grad_norm": 45.666664123535156, - "learning_rate": 8.497777777777778e-07, - "loss": 1.5838, - "step": 60110 - }, - { - "epoch": 1.92384, - "grad_norm": 46.842872619628906, - "learning_rate": 8.462222222222224e-07, - "loss": 1.5762, - "step": 60120 - }, - { - "epoch": 1.92416, - "grad_norm": 44.966983795166016, - "learning_rate": 8.426666666666668e-07, - "loss": 1.598, - "step": 60130 - }, - { - "epoch": 1.92448, - "grad_norm": 45.7222785949707, - "learning_rate": 8.391111111111112e-07, - "loss": 1.5715, - "step": 60140 - }, - { - "epoch": 1.9247999999999998, - "grad_norm": 46.058494567871094, - "learning_rate": 8.355555555555556e-07, - "loss": 1.5818, - "step": 60150 - }, - { - "epoch": 1.9251200000000002, - "grad_norm": 45.96712112426758, - "learning_rate": 8.32e-07, - "loss": 1.5925, - "step": 60160 - }, - { - "epoch": 1.92544, - "grad_norm": 46.22428512573242, - "learning_rate": 8.284444444444445e-07, - "loss": 1.5582, - "step": 60170 - }, - { - "epoch": 1.92576, - "grad_norm": 47.24472427368164, - "learning_rate": 8.248888888888889e-07, - "loss": 1.588, - "step": 60180 - }, - { - "epoch": 1.92608, - "grad_norm": 46.91118240356445, - "learning_rate": 8.213333333333334e-07, - "loss": 1.5993, - "step": 60190 - }, - { - "epoch": 1.9264000000000001, - "grad_norm": 47.56013488769531, - "learning_rate": 8.177777777777779e-07, - "loss": 1.5783, - "step": 60200 - }, - { - "epoch": 1.92672, - "grad_norm": 45.22146987915039, - "learning_rate": 8.142222222222224e-07, - "loss": 1.5904, - "step": 60210 - }, - { - "epoch": 1.9270399999999999, - "grad_norm": 45.85641860961914, - "learning_rate": 8.106666666666667e-07, - "loss": 1.5745, - "step": 60220 - }, - { - "epoch": 1.92736, - "grad_norm": 43.82615280151367, - "learning_rate": 8.071111111111112e-07, - "loss": 1.5985, - "step": 60230 - }, - { - "epoch": 1.92768, - "grad_norm": 46.80747604370117, - "learning_rate": 8.035555555555556e-07, - "loss": 1.6022, - "step": 60240 - }, - { - "epoch": 1.928, - "grad_norm": 45.95657730102539, - "learning_rate": 8.000000000000001e-07, - "loss": 1.5953, - "step": 60250 - }, - { - "epoch": 1.92832, - "grad_norm": 46.12740707397461, - "learning_rate": 7.964444444444445e-07, - "loss": 1.5758, - "step": 60260 - }, - { - "epoch": 1.9286400000000001, - "grad_norm": 45.15321731567383, - "learning_rate": 7.92888888888889e-07, - "loss": 1.5914, - "step": 60270 - }, - { - "epoch": 1.92896, - "grad_norm": 46.08040237426758, - "learning_rate": 7.893333333333333e-07, - "loss": 1.6012, - "step": 60280 - }, - { - "epoch": 1.9292799999999999, - "grad_norm": 44.98908996582031, - "learning_rate": 7.857777777777778e-07, - "loss": 1.5835, - "step": 60290 - }, - { - "epoch": 1.9296, - "grad_norm": 45.012203216552734, - "learning_rate": 7.822222222222223e-07, - "loss": 1.5841, - "step": 60300 - }, - { - "epoch": 1.92992, - "grad_norm": 44.48496627807617, - "learning_rate": 7.786666666666668e-07, - "loss": 1.5726, - "step": 60310 - }, - { - "epoch": 1.93024, - "grad_norm": 43.63051986694336, - "learning_rate": 7.751111111111112e-07, - "loss": 1.5879, - "step": 60320 - }, - { - "epoch": 1.93056, - "grad_norm": 
45.21602249145508, - "learning_rate": 7.715555555555557e-07, - "loss": 1.5784, - "step": 60330 - }, - { - "epoch": 1.9308800000000002, - "grad_norm": 46.25171661376953, - "learning_rate": 7.68e-07, - "loss": 1.6037, - "step": 60340 - }, - { - "epoch": 1.9312, - "grad_norm": 44.56587600708008, - "learning_rate": 7.644444444444444e-07, - "loss": 1.5812, - "step": 60350 - }, - { - "epoch": 1.93152, - "grad_norm": 45.03590393066406, - "learning_rate": 7.608888888888889e-07, - "loss": 1.6225, - "step": 60360 - }, - { - "epoch": 1.93184, - "grad_norm": 44.908931732177734, - "learning_rate": 7.573333333333334e-07, - "loss": 1.5856, - "step": 60370 - }, - { - "epoch": 1.93216, - "grad_norm": 43.26665496826172, - "learning_rate": 7.537777777777779e-07, - "loss": 1.5954, - "step": 60380 - }, - { - "epoch": 1.93248, - "grad_norm": 45.77558135986328, - "learning_rate": 7.502222222222223e-07, - "loss": 1.592, - "step": 60390 - }, - { - "epoch": 1.9327999999999999, - "grad_norm": 44.50881576538086, - "learning_rate": 7.466666666666668e-07, - "loss": 1.5838, - "step": 60400 - }, - { - "epoch": 1.93312, - "grad_norm": 45.751163482666016, - "learning_rate": 7.431111111111111e-07, - "loss": 1.5825, - "step": 60410 - }, - { - "epoch": 1.93344, - "grad_norm": 47.217376708984375, - "learning_rate": 7.395555555555556e-07, - "loss": 1.5713, - "step": 60420 - }, - { - "epoch": 1.93376, - "grad_norm": 46.75165939331055, - "learning_rate": 7.36e-07, - "loss": 1.5891, - "step": 60430 - }, - { - "epoch": 1.93408, - "grad_norm": 45.729713439941406, - "learning_rate": 7.324444444444445e-07, - "loss": 1.5803, - "step": 60440 - }, - { - "epoch": 1.9344000000000001, - "grad_norm": 44.836177825927734, - "learning_rate": 7.28888888888889e-07, - "loss": 1.5715, - "step": 60450 - }, - { - "epoch": 1.93472, - "grad_norm": 46.619300842285156, - "learning_rate": 7.253333333333335e-07, - "loss": 1.5787, - "step": 60460 - }, - { - "epoch": 1.9350399999999999, - "grad_norm": 45.582069396972656, - "learning_rate": 7.217777777777778e-07, - "loss": 1.5786, - "step": 60470 - }, - { - "epoch": 1.93536, - "grad_norm": 43.99139404296875, - "learning_rate": 7.182222222222223e-07, - "loss": 1.6028, - "step": 60480 - }, - { - "epoch": 1.93568, - "grad_norm": 47.75700759887695, - "learning_rate": 7.146666666666667e-07, - "loss": 1.5846, - "step": 60490 - }, - { - "epoch": 1.936, - "grad_norm": 47.97227096557617, - "learning_rate": 7.111111111111112e-07, - "loss": 1.5992, - "step": 60500 - }, - { - "epoch": 1.93632, - "grad_norm": 45.68336486816406, - "learning_rate": 7.075555555555556e-07, - "loss": 1.5879, - "step": 60510 - }, - { - "epoch": 1.9366400000000001, - "grad_norm": 44.39118194580078, - "learning_rate": 7.040000000000001e-07, - "loss": 1.613, - "step": 60520 - }, - { - "epoch": 1.93696, - "grad_norm": 47.586116790771484, - "learning_rate": 7.004444444444446e-07, - "loss": 1.569, - "step": 60530 - }, - { - "epoch": 1.93728, - "grad_norm": 46.94290542602539, - "learning_rate": 6.968888888888888e-07, - "loss": 1.5928, - "step": 60540 - }, - { - "epoch": 1.9376, - "grad_norm": 44.617095947265625, - "learning_rate": 6.933333333333334e-07, - "loss": 1.5557, - "step": 60550 - }, - { - "epoch": 1.93792, - "grad_norm": 45.4676513671875, - "learning_rate": 6.897777777777778e-07, - "loss": 1.5778, - "step": 60560 - }, - { - "epoch": 1.93824, - "grad_norm": 46.230648040771484, - "learning_rate": 6.862222222222223e-07, - "loss": 1.5916, - "step": 60570 - }, - { - "epoch": 1.9385599999999998, - "grad_norm": 45.70417404174805, - "learning_rate": 
6.826666666666667e-07, - "loss": 1.5775, - "step": 60580 - }, - { - "epoch": 1.9388800000000002, - "grad_norm": 46.56899642944336, - "learning_rate": 6.791111111111112e-07, - "loss": 1.5836, - "step": 60590 - }, - { - "epoch": 1.9392, - "grad_norm": 44.46154022216797, - "learning_rate": 6.755555555555555e-07, - "loss": 1.5723, - "step": 60600 - }, - { - "epoch": 1.93952, - "grad_norm": 45.880916595458984, - "learning_rate": 6.72e-07, - "loss": 1.573, - "step": 60610 - }, - { - "epoch": 1.93984, - "grad_norm": 46.16361618041992, - "learning_rate": 6.684444444444444e-07, - "loss": 1.5931, - "step": 60620 - }, - { - "epoch": 1.94016, - "grad_norm": 43.51244354248047, - "learning_rate": 6.64888888888889e-07, - "loss": 1.5756, - "step": 60630 - }, - { - "epoch": 1.94048, - "grad_norm": 44.64166259765625, - "learning_rate": 6.613333333333334e-07, - "loss": 1.5582, - "step": 60640 - }, - { - "epoch": 1.9407999999999999, - "grad_norm": 46.329654693603516, - "learning_rate": 6.577777777777779e-07, - "loss": 1.5762, - "step": 60650 - }, - { - "epoch": 1.94112, - "grad_norm": 47.477203369140625, - "learning_rate": 6.542222222222223e-07, - "loss": 1.582, - "step": 60660 - }, - { - "epoch": 1.94144, - "grad_norm": 44.95314407348633, - "learning_rate": 6.506666666666667e-07, - "loss": 1.5694, - "step": 60670 - }, - { - "epoch": 1.94176, - "grad_norm": 46.40793228149414, - "learning_rate": 6.471111111111111e-07, - "loss": 1.5762, - "step": 60680 - }, - { - "epoch": 1.94208, - "grad_norm": 47.274681091308594, - "learning_rate": 6.435555555555556e-07, - "loss": 1.5578, - "step": 60690 - }, - { - "epoch": 1.9424000000000001, - "grad_norm": 45.94260787963867, - "learning_rate": 6.4e-07, - "loss": 1.5773, - "step": 60700 - }, - { - "epoch": 1.94272, - "grad_norm": 45.475677490234375, - "learning_rate": 6.364444444444446e-07, - "loss": 1.5859, - "step": 60710 - }, - { - "epoch": 1.9430399999999999, - "grad_norm": 49.357112884521484, - "learning_rate": 6.32888888888889e-07, - "loss": 1.5858, - "step": 60720 - }, - { - "epoch": 1.94336, - "grad_norm": 45.360870361328125, - "learning_rate": 6.293333333333334e-07, - "loss": 1.5734, - "step": 60730 - }, - { - "epoch": 1.94368, - "grad_norm": 46.292423248291016, - "learning_rate": 6.257777777777778e-07, - "loss": 1.5622, - "step": 60740 - }, - { - "epoch": 1.944, - "grad_norm": 46.921993255615234, - "learning_rate": 6.222222222222223e-07, - "loss": 1.5833, - "step": 60750 - }, - { - "epoch": 1.94432, - "grad_norm": 45.2756462097168, - "learning_rate": 6.186666666666667e-07, - "loss": 1.5684, - "step": 60760 - }, - { - "epoch": 1.9446400000000001, - "grad_norm": 44.58122634887695, - "learning_rate": 6.151111111111111e-07, - "loss": 1.5887, - "step": 60770 - }, - { - "epoch": 1.94496, - "grad_norm": 46.2196044921875, - "learning_rate": 6.115555555555556e-07, - "loss": 1.5575, - "step": 60780 - }, - { - "epoch": 1.94528, - "grad_norm": 45.56631851196289, - "learning_rate": 6.08e-07, - "loss": 1.5961, - "step": 60790 - }, - { - "epoch": 1.9456, - "grad_norm": 46.696372985839844, - "learning_rate": 6.044444444444445e-07, - "loss": 1.5915, - "step": 60800 - }, - { - "epoch": 1.94592, - "grad_norm": 46.68487548828125, - "learning_rate": 6.008888888888889e-07, - "loss": 1.5701, - "step": 60810 - }, - { - "epoch": 1.94624, - "grad_norm": 46.34423828125, - "learning_rate": 5.973333333333334e-07, - "loss": 1.585, - "step": 60820 - }, - { - "epoch": 1.9465599999999998, - "grad_norm": 45.44008255004883, - "learning_rate": 5.937777777777778e-07, - "loss": 1.5653, - "step": 60830 
- }, - { - "epoch": 1.9468800000000002, - "grad_norm": 46.109352111816406, - "learning_rate": 5.902222222222223e-07, - "loss": 1.603, - "step": 60840 - }, - { - "epoch": 1.9472, - "grad_norm": 47.0877799987793, - "learning_rate": 5.866666666666667e-07, - "loss": 1.5975, - "step": 60850 - }, - { - "epoch": 1.94752, - "grad_norm": 45.722328186035156, - "learning_rate": 5.831111111111112e-07, - "loss": 1.5726, - "step": 60860 - }, - { - "epoch": 1.94784, - "grad_norm": 47.501426696777344, - "learning_rate": 5.795555555555556e-07, - "loss": 1.6096, - "step": 60870 - }, - { - "epoch": 1.9481600000000001, - "grad_norm": 46.5894889831543, - "learning_rate": 5.760000000000001e-07, - "loss": 1.5495, - "step": 60880 - }, - { - "epoch": 1.94848, - "grad_norm": 44.740455627441406, - "learning_rate": 5.724444444444445e-07, - "loss": 1.5948, - "step": 60890 - }, - { - "epoch": 1.9487999999999999, - "grad_norm": 45.47201919555664, - "learning_rate": 5.68888888888889e-07, - "loss": 1.5641, - "step": 60900 - }, - { - "epoch": 1.94912, - "grad_norm": 45.011924743652344, - "learning_rate": 5.653333333333333e-07, - "loss": 1.5768, - "step": 60910 - }, - { - "epoch": 1.94944, - "grad_norm": 45.84285354614258, - "learning_rate": 5.617777777777779e-07, - "loss": 1.5938, - "step": 60920 - }, - { - "epoch": 1.94976, - "grad_norm": 46.208534240722656, - "learning_rate": 5.582222222222223e-07, - "loss": 1.5976, - "step": 60930 - }, - { - "epoch": 1.95008, - "grad_norm": 46.11305618286133, - "learning_rate": 5.546666666666667e-07, - "loss": 1.5793, - "step": 60940 - }, - { - "epoch": 1.9504000000000001, - "grad_norm": 45.28938674926758, - "learning_rate": 5.511111111111111e-07, - "loss": 1.5667, - "step": 60950 - }, - { - "epoch": 1.95072, - "grad_norm": 46.1634521484375, - "learning_rate": 5.475555555555557e-07, - "loss": 1.597, - "step": 60960 - }, - { - "epoch": 1.9510399999999999, - "grad_norm": 44.47576904296875, - "learning_rate": 5.44e-07, - "loss": 1.5682, - "step": 60970 - }, - { - "epoch": 1.95136, - "grad_norm": 44.76446533203125, - "learning_rate": 5.404444444444444e-07, - "loss": 1.5536, - "step": 60980 - }, - { - "epoch": 1.95168, - "grad_norm": 46.57415771484375, - "learning_rate": 5.368888888888889e-07, - "loss": 1.5834, - "step": 60990 - }, - { - "epoch": 1.952, - "grad_norm": 46.719879150390625, - "learning_rate": 5.333333333333335e-07, - "loss": 1.5883, - "step": 61000 - }, - { - "epoch": 1.95232, - "grad_norm": 45.457401275634766, - "learning_rate": 5.297777777777778e-07, - "loss": 1.6105, - "step": 61010 - }, - { - "epoch": 1.9526400000000002, - "grad_norm": 46.55323028564453, - "learning_rate": 5.262222222222222e-07, - "loss": 1.5661, - "step": 61020 - }, - { - "epoch": 1.95296, - "grad_norm": 46.279483795166016, - "learning_rate": 5.226666666666667e-07, - "loss": 1.6017, - "step": 61030 - }, - { - "epoch": 1.95328, - "grad_norm": 45.748374938964844, - "learning_rate": 5.191111111111111e-07, - "loss": 1.5772, - "step": 61040 - }, - { - "epoch": 1.9536, - "grad_norm": 46.23691177368164, - "learning_rate": 5.155555555555556e-07, - "loss": 1.5956, - "step": 61050 - }, - { - "epoch": 1.95392, - "grad_norm": 45.71986389160156, - "learning_rate": 5.12e-07, - "loss": 1.5746, - "step": 61060 - }, - { - "epoch": 1.95424, - "grad_norm": 46.47679138183594, - "learning_rate": 5.084444444444445e-07, - "loss": 1.5706, - "step": 61070 - }, - { - "epoch": 1.9545599999999999, - "grad_norm": 44.26342010498047, - "learning_rate": 5.048888888888889e-07, - "loss": 1.5738, - "step": 61080 - }, - { - "epoch": 1.95488, 
- "grad_norm": 46.3544921875, - "learning_rate": 5.013333333333334e-07, - "loss": 1.6082, - "step": 61090 - }, - { - "epoch": 1.9552, - "grad_norm": 47.279273986816406, - "learning_rate": 4.977777777777778e-07, - "loss": 1.5935, - "step": 61100 - }, - { - "epoch": 1.95552, - "grad_norm": 45.07588577270508, - "learning_rate": 4.942222222222223e-07, - "loss": 1.5689, - "step": 61110 - }, - { - "epoch": 1.95584, - "grad_norm": 46.14258575439453, - "learning_rate": 4.906666666666667e-07, - "loss": 1.5729, - "step": 61120 - }, - { - "epoch": 1.9561600000000001, - "grad_norm": 46.78408432006836, - "learning_rate": 4.871111111111112e-07, - "loss": 1.5916, - "step": 61130 - }, - { - "epoch": 1.95648, - "grad_norm": 46.98505783081055, - "learning_rate": 4.835555555555556e-07, - "loss": 1.5717, - "step": 61140 - }, - { - "epoch": 1.9567999999999999, - "grad_norm": 47.45516586303711, - "learning_rate": 4.800000000000001e-07, - "loss": 1.5884, - "step": 61150 - }, - { - "epoch": 1.95712, - "grad_norm": 45.89262390136719, - "learning_rate": 4.764444444444445e-07, - "loss": 1.5847, - "step": 61160 - }, - { - "epoch": 1.95744, - "grad_norm": 46.40864181518555, - "learning_rate": 4.728888888888889e-07, - "loss": 1.5957, - "step": 61170 - }, - { - "epoch": 1.95776, - "grad_norm": 46.90511703491211, - "learning_rate": 4.6933333333333335e-07, - "loss": 1.5829, - "step": 61180 - }, - { - "epoch": 1.95808, - "grad_norm": 47.575321197509766, - "learning_rate": 4.6577777777777785e-07, - "loss": 1.5846, - "step": 61190 - }, - { - "epoch": 1.9584000000000001, - "grad_norm": 46.93019485473633, - "learning_rate": 4.6222222222222225e-07, - "loss": 1.5949, - "step": 61200 - }, - { - "epoch": 1.95872, - "grad_norm": 44.054134368896484, - "learning_rate": 4.586666666666667e-07, - "loss": 1.6058, - "step": 61210 - }, - { - "epoch": 1.95904, - "grad_norm": 43.94884490966797, - "learning_rate": 4.5511111111111115e-07, - "loss": 1.5917, - "step": 61220 - }, - { - "epoch": 1.95936, - "grad_norm": 47.810829162597656, - "learning_rate": 4.5155555555555554e-07, - "loss": 1.5809, - "step": 61230 - }, - { - "epoch": 1.95968, - "grad_norm": 46.7258415222168, - "learning_rate": 4.4800000000000004e-07, - "loss": 1.5671, - "step": 61240 - }, - { - "epoch": 1.96, - "grad_norm": 46.42376708984375, - "learning_rate": 4.444444444444445e-07, - "loss": 1.5741, - "step": 61250 - }, - { - "epoch": 1.9603199999999998, - "grad_norm": 44.41875457763672, - "learning_rate": 4.408888888888889e-07, - "loss": 1.5793, - "step": 61260 - }, - { - "epoch": 1.9606400000000002, - "grad_norm": 46.66280746459961, - "learning_rate": 4.373333333333334e-07, - "loss": 1.587, - "step": 61270 - }, - { - "epoch": 1.96096, - "grad_norm": 46.54970169067383, - "learning_rate": 4.3377777777777783e-07, - "loss": 1.5802, - "step": 61280 - }, - { - "epoch": 1.96128, - "grad_norm": 46.72460174560547, - "learning_rate": 4.302222222222223e-07, - "loss": 1.5841, - "step": 61290 - }, - { - "epoch": 1.9616, - "grad_norm": 46.453678131103516, - "learning_rate": 4.266666666666667e-07, - "loss": 1.5926, - "step": 61300 - }, - { - "epoch": 1.96192, - "grad_norm": 44.05784225463867, - "learning_rate": 4.231111111111112e-07, - "loss": 1.5745, - "step": 61310 - }, - { - "epoch": 1.96224, - "grad_norm": 46.23289108276367, - "learning_rate": 4.195555555555556e-07, - "loss": 1.561, - "step": 61320 - }, - { - "epoch": 1.9625599999999999, - "grad_norm": 47.4354362487793, - "learning_rate": 4.16e-07, - "loss": 1.6063, - "step": 61330 - }, - { - "epoch": 1.96288, - "grad_norm": 
46.875816345214844, - "learning_rate": 4.1244444444444447e-07, - "loss": 1.5885, - "step": 61340 - }, - { - "epoch": 1.9632, - "grad_norm": 46.69877243041992, - "learning_rate": 4.0888888888888897e-07, - "loss": 1.5855, - "step": 61350 - }, - { - "epoch": 1.96352, - "grad_norm": 45.40488052368164, - "learning_rate": 4.0533333333333336e-07, - "loss": 1.5825, - "step": 61360 - }, - { - "epoch": 1.96384, - "grad_norm": 44.27571105957031, - "learning_rate": 4.017777777777778e-07, - "loss": 1.5694, - "step": 61370 - }, - { - "epoch": 1.9641600000000001, - "grad_norm": 48.688377380371094, - "learning_rate": 3.9822222222222226e-07, - "loss": 1.5695, - "step": 61380 - }, - { - "epoch": 1.96448, - "grad_norm": 46.94468688964844, - "learning_rate": 3.9466666666666665e-07, - "loss": 1.6029, - "step": 61390 - }, - { - "epoch": 1.9647999999999999, - "grad_norm": 46.42035675048828, - "learning_rate": 3.9111111111111115e-07, - "loss": 1.6019, - "step": 61400 - }, - { - "epoch": 1.96512, - "grad_norm": 47.09354782104492, - "learning_rate": 3.875555555555556e-07, - "loss": 1.6001, - "step": 61410 - }, - { - "epoch": 1.96544, - "grad_norm": 44.141197204589844, - "learning_rate": 3.84e-07, - "loss": 1.5785, - "step": 61420 - }, - { - "epoch": 1.96576, - "grad_norm": 48.216793060302734, - "learning_rate": 3.8044444444444445e-07, - "loss": 1.588, - "step": 61430 - }, - { - "epoch": 1.96608, - "grad_norm": 46.14058303833008, - "learning_rate": 3.7688888888888895e-07, - "loss": 1.5482, - "step": 61440 - }, - { - "epoch": 1.9664000000000001, - "grad_norm": 46.50981140136719, - "learning_rate": 3.733333333333334e-07, - "loss": 1.5704, - "step": 61450 - }, - { - "epoch": 1.96672, - "grad_norm": 46.209110260009766, - "learning_rate": 3.697777777777778e-07, - "loss": 1.5685, - "step": 61460 - }, - { - "epoch": 1.96704, - "grad_norm": 45.74169158935547, - "learning_rate": 3.6622222222222224e-07, - "loss": 1.62, - "step": 61470 - }, - { - "epoch": 1.96736, - "grad_norm": 46.35404968261719, - "learning_rate": 3.6266666666666674e-07, - "loss": 1.5676, - "step": 61480 - }, - { - "epoch": 1.96768, - "grad_norm": 47.3962287902832, - "learning_rate": 3.5911111111111113e-07, - "loss": 1.591, - "step": 61490 - }, - { - "epoch": 1.968, - "grad_norm": 46.52360534667969, - "learning_rate": 3.555555555555556e-07, - "loss": 1.5589, - "step": 61500 - }, - { - "epoch": 1.9683199999999998, - "grad_norm": 44.55923843383789, - "learning_rate": 3.5200000000000003e-07, - "loss": 1.588, - "step": 61510 - }, - { - "epoch": 1.96864, - "grad_norm": 45.096378326416016, - "learning_rate": 3.484444444444444e-07, - "loss": 1.6024, - "step": 61520 - }, - { - "epoch": 1.96896, - "grad_norm": 44.80331802368164, - "learning_rate": 3.448888888888889e-07, - "loss": 1.5675, - "step": 61530 - }, - { - "epoch": 1.96928, - "grad_norm": 45.03395462036133, - "learning_rate": 3.4133333333333337e-07, - "loss": 1.6069, - "step": 61540 - }, - { - "epoch": 1.9696, - "grad_norm": 46.5980224609375, - "learning_rate": 3.3777777777777777e-07, - "loss": 1.612, - "step": 61550 - }, - { - "epoch": 1.9699200000000001, - "grad_norm": 46.33894348144531, - "learning_rate": 3.342222222222222e-07, - "loss": 1.5674, - "step": 61560 - }, - { - "epoch": 1.97024, - "grad_norm": 45.43359375, - "learning_rate": 3.306666666666667e-07, - "loss": 1.5893, - "step": 61570 - }, - { - "epoch": 1.9705599999999999, - "grad_norm": 44.497108459472656, - "learning_rate": 3.2711111111111116e-07, - "loss": 1.5841, - "step": 61580 - }, - { - "epoch": 1.97088, - "grad_norm": 45.044307708740234, - 
"learning_rate": 3.2355555555555556e-07, - "loss": 1.5929, - "step": 61590 - }, - { - "epoch": 1.9712, - "grad_norm": 45.46745681762695, - "learning_rate": 3.2e-07, - "loss": 1.5928, - "step": 61600 - }, - { - "epoch": 1.97152, - "grad_norm": 44.18543243408203, - "learning_rate": 3.164444444444445e-07, - "loss": 1.5643, - "step": 61610 - }, - { - "epoch": 1.97184, - "grad_norm": 48.65835189819336, - "learning_rate": 3.128888888888889e-07, - "loss": 1.5772, - "step": 61620 - }, - { - "epoch": 1.9721600000000001, - "grad_norm": 43.52328872680664, - "learning_rate": 3.0933333333333335e-07, - "loss": 1.5517, - "step": 61630 - }, - { - "epoch": 1.97248, - "grad_norm": 45.04435729980469, - "learning_rate": 3.057777777777778e-07, - "loss": 1.5833, - "step": 61640 - }, - { - "epoch": 1.9727999999999999, - "grad_norm": 49.300537109375, - "learning_rate": 3.0222222222222225e-07, - "loss": 1.581, - "step": 61650 - }, - { - "epoch": 1.97312, - "grad_norm": 45.710182189941406, - "learning_rate": 2.986666666666667e-07, - "loss": 1.6062, - "step": 61660 - }, - { - "epoch": 1.97344, - "grad_norm": 45.44401550292969, - "learning_rate": 2.9511111111111114e-07, - "loss": 1.5691, - "step": 61670 - }, - { - "epoch": 1.97376, - "grad_norm": 46.06452178955078, - "learning_rate": 2.915555555555556e-07, - "loss": 1.5492, - "step": 61680 - }, - { - "epoch": 1.9740799999999998, - "grad_norm": 45.38791275024414, - "learning_rate": 2.8800000000000004e-07, - "loss": 1.5955, - "step": 61690 - }, - { - "epoch": 1.9744000000000002, - "grad_norm": 45.86880874633789, - "learning_rate": 2.844444444444445e-07, - "loss": 1.6253, - "step": 61700 - }, - { - "epoch": 1.97472, - "grad_norm": 46.864437103271484, - "learning_rate": 2.8088888888888893e-07, - "loss": 1.5992, - "step": 61710 - }, - { - "epoch": 1.97504, - "grad_norm": 46.647247314453125, - "learning_rate": 2.7733333333333333e-07, - "loss": 1.5904, - "step": 61720 - }, - { - "epoch": 1.97536, - "grad_norm": 46.546382904052734, - "learning_rate": 2.7377777777777783e-07, - "loss": 1.6046, - "step": 61730 - }, - { - "epoch": 1.97568, - "grad_norm": 45.93708038330078, - "learning_rate": 2.702222222222222e-07, - "loss": 1.5922, - "step": 61740 - }, - { - "epoch": 1.976, - "grad_norm": 47.503761291503906, - "learning_rate": 2.666666666666667e-07, - "loss": 1.5745, - "step": 61750 - }, - { - "epoch": 1.9763199999999999, - "grad_norm": 47.48344039916992, - "learning_rate": 2.631111111111111e-07, - "loss": 1.5903, - "step": 61760 - }, - { - "epoch": 1.97664, - "grad_norm": 46.735836029052734, - "learning_rate": 2.5955555555555557e-07, - "loss": 1.5687, - "step": 61770 - }, - { - "epoch": 1.97696, - "grad_norm": 45.55669403076172, - "learning_rate": 2.56e-07, - "loss": 1.5991, - "step": 61780 - }, - { - "epoch": 1.97728, - "grad_norm": 45.0802001953125, - "learning_rate": 2.5244444444444446e-07, - "loss": 1.5932, - "step": 61790 - }, - { - "epoch": 1.9776, - "grad_norm": 44.832820892333984, - "learning_rate": 2.488888888888889e-07, - "loss": 1.5915, - "step": 61800 - }, - { - "epoch": 1.9779200000000001, - "grad_norm": 46.13886642456055, - "learning_rate": 2.4533333333333336e-07, - "loss": 1.6123, - "step": 61810 - }, - { - "epoch": 1.97824, - "grad_norm": 44.888118743896484, - "learning_rate": 2.417777777777778e-07, - "loss": 1.5735, - "step": 61820 - }, - { - "epoch": 1.9785599999999999, - "grad_norm": 46.51178741455078, - "learning_rate": 2.3822222222222226e-07, - "loss": 1.5806, - "step": 61830 - }, - { - "epoch": 1.97888, - "grad_norm": 46.9849967956543, - "learning_rate": 
2.3466666666666668e-07, - "loss": 1.6006, - "step": 61840 - }, - { - "epoch": 1.9792, - "grad_norm": 45.56910705566406, - "learning_rate": 2.3111111111111112e-07, - "loss": 1.5665, - "step": 61850 - }, - { - "epoch": 1.97952, - "grad_norm": 46.16838073730469, - "learning_rate": 2.2755555555555557e-07, - "loss": 1.5849, - "step": 61860 - }, - { - "epoch": 1.97984, - "grad_norm": 46.8780403137207, - "learning_rate": 2.2400000000000002e-07, - "loss": 1.5849, - "step": 61870 - }, - { - "epoch": 1.9801600000000001, - "grad_norm": 46.16302490234375, - "learning_rate": 2.2044444444444444e-07, - "loss": 1.5572, - "step": 61880 - }, - { - "epoch": 1.98048, - "grad_norm": 46.81636428833008, - "learning_rate": 2.1688888888888892e-07, - "loss": 1.5691, - "step": 61890 - }, - { - "epoch": 1.9808, - "grad_norm": 45.55078887939453, - "learning_rate": 2.1333333333333334e-07, - "loss": 1.5925, - "step": 61900 - }, - { - "epoch": 1.98112, - "grad_norm": 46.17424011230469, - "learning_rate": 2.097777777777778e-07, - "loss": 1.606, - "step": 61910 - }, - { - "epoch": 1.98144, - "grad_norm": 46.1997184753418, - "learning_rate": 2.0622222222222223e-07, - "loss": 1.5906, - "step": 61920 - }, - { - "epoch": 1.98176, - "grad_norm": 46.45069885253906, - "learning_rate": 2.0266666666666668e-07, - "loss": 1.5905, - "step": 61930 - }, - { - "epoch": 1.9820799999999998, - "grad_norm": 47.12847137451172, - "learning_rate": 1.9911111111111113e-07, - "loss": 1.5989, - "step": 61940 - }, - { - "epoch": 1.9824000000000002, - "grad_norm": 47.08571243286133, - "learning_rate": 1.9555555555555558e-07, - "loss": 1.5649, - "step": 61950 - }, - { - "epoch": 1.98272, - "grad_norm": 43.01755905151367, - "learning_rate": 1.92e-07, - "loss": 1.5765, - "step": 61960 - }, - { - "epoch": 1.98304, - "grad_norm": 45.58803176879883, - "learning_rate": 1.8844444444444447e-07, - "loss": 1.5654, - "step": 61970 - }, - { - "epoch": 1.98336, - "grad_norm": 46.55001449584961, - "learning_rate": 1.848888888888889e-07, - "loss": 1.5973, - "step": 61980 - }, - { - "epoch": 1.98368, - "grad_norm": 48.73170471191406, - "learning_rate": 1.8133333333333337e-07, - "loss": 1.5939, - "step": 61990 - }, - { - "epoch": 1.984, - "grad_norm": 46.5644416809082, - "learning_rate": 1.777777777777778e-07, - "loss": 1.6406, - "step": 62000 - }, - { - "epoch": 1.9843199999999999, - "grad_norm": 47.24319076538086, - "learning_rate": 1.742222222222222e-07, - "loss": 1.5819, - "step": 62010 - }, - { - "epoch": 1.98464, - "grad_norm": 46.026180267333984, - "learning_rate": 1.7066666666666669e-07, - "loss": 1.6059, - "step": 62020 - }, - { - "epoch": 1.98496, - "grad_norm": 47.73154830932617, - "learning_rate": 1.671111111111111e-07, - "loss": 1.5876, - "step": 62030 - }, - { - "epoch": 1.98528, - "grad_norm": 47.258453369140625, - "learning_rate": 1.6355555555555558e-07, - "loss": 1.5868, - "step": 62040 - }, - { - "epoch": 1.9856, - "grad_norm": 44.5970344543457, - "learning_rate": 1.6e-07, - "loss": 1.5916, - "step": 62050 - }, - { - "epoch": 1.9859200000000001, - "grad_norm": 46.769012451171875, - "learning_rate": 1.5644444444444445e-07, - "loss": 1.5834, - "step": 62060 - }, - { - "epoch": 1.98624, - "grad_norm": 46.680118560791016, - "learning_rate": 1.528888888888889e-07, - "loss": 1.5683, - "step": 62070 - }, - { - "epoch": 1.9865599999999999, - "grad_norm": 44.251956939697266, - "learning_rate": 1.4933333333333335e-07, - "loss": 1.5555, - "step": 62080 - }, - { - "epoch": 1.98688, - "grad_norm": 44.65584182739258, - "learning_rate": 1.457777777777778e-07, - 
"loss": 1.5926, - "step": 62090 - }, - { - "epoch": 1.9872, - "grad_norm": 46.639320373535156, - "learning_rate": 1.4222222222222224e-07, - "loss": 1.5981, - "step": 62100 - }, - { - "epoch": 1.98752, - "grad_norm": 47.51844024658203, - "learning_rate": 1.3866666666666666e-07, - "loss": 1.613, - "step": 62110 - }, - { - "epoch": 1.98784, - "grad_norm": 46.52462387084961, - "learning_rate": 1.351111111111111e-07, - "loss": 1.5845, - "step": 62120 - }, - { - "epoch": 1.9881600000000001, - "grad_norm": 44.17267990112305, - "learning_rate": 1.3155555555555556e-07, - "loss": 1.5731, - "step": 62130 - }, - { - "epoch": 1.98848, - "grad_norm": 45.80207824707031, - "learning_rate": 1.28e-07, - "loss": 1.5949, - "step": 62140 - }, - { - "epoch": 1.9888, - "grad_norm": 47.79655456542969, - "learning_rate": 1.2444444444444446e-07, - "loss": 1.6052, - "step": 62150 - }, - { - "epoch": 1.98912, - "grad_norm": 44.944766998291016, - "learning_rate": 1.208888888888889e-07, - "loss": 1.5771, - "step": 62160 - }, - { - "epoch": 1.98944, - "grad_norm": 46.573734283447266, - "learning_rate": 1.1733333333333334e-07, - "loss": 1.5782, - "step": 62170 - }, - { - "epoch": 1.98976, - "grad_norm": 47.87807083129883, - "learning_rate": 1.1377777777777779e-07, - "loss": 1.5974, - "step": 62180 - }, - { - "epoch": 1.9900799999999998, - "grad_norm": 45.32151412963867, - "learning_rate": 1.1022222222222222e-07, - "loss": 1.5836, - "step": 62190 - }, - { - "epoch": 1.9904, - "grad_norm": 46.42742919921875, - "learning_rate": 1.0666666666666667e-07, - "loss": 1.5909, - "step": 62200 - }, - { - "epoch": 1.99072, - "grad_norm": 48.42001724243164, - "learning_rate": 1.0311111111111112e-07, - "loss": 1.5701, - "step": 62210 - }, - { - "epoch": 1.99104, - "grad_norm": 43.873111724853516, - "learning_rate": 9.955555555555556e-08, - "loss": 1.5792, - "step": 62220 - }, - { - "epoch": 1.99136, - "grad_norm": 46.204490661621094, - "learning_rate": 9.6e-08, - "loss": 1.6342, - "step": 62230 - }, - { - "epoch": 1.9916800000000001, - "grad_norm": 47.35697555541992, - "learning_rate": 9.244444444444445e-08, - "loss": 1.6296, - "step": 62240 - }, - { - "epoch": 1.992, - "grad_norm": 44.651161193847656, - "learning_rate": 8.88888888888889e-08, - "loss": 1.6051, - "step": 62250 - }, - { - "epoch": 1.9923199999999999, - "grad_norm": 47.13521194458008, - "learning_rate": 8.533333333333334e-08, - "loss": 1.6059, - "step": 62260 - }, - { - "epoch": 1.99264, - "grad_norm": 44.870391845703125, - "learning_rate": 8.177777777777779e-08, - "loss": 1.5809, - "step": 62270 - }, - { - "epoch": 1.99296, - "grad_norm": 45.75646209716797, - "learning_rate": 7.822222222222223e-08, - "loss": 1.5928, - "step": 62280 - }, - { - "epoch": 1.99328, - "grad_norm": 46.337249755859375, - "learning_rate": 7.466666666666667e-08, - "loss": 1.592, - "step": 62290 - }, - { - "epoch": 1.9936, - "grad_norm": 43.975128173828125, - "learning_rate": 7.111111111111112e-08, - "loss": 1.5866, - "step": 62300 - }, - { - "epoch": 1.9939200000000001, - "grad_norm": 47.980926513671875, - "learning_rate": 6.755555555555556e-08, - "loss": 1.5795, - "step": 62310 - }, - { - "epoch": 1.99424, - "grad_norm": 48.362449645996094, - "learning_rate": 6.4e-08, - "loss": 1.6139, - "step": 62320 - }, - { - "epoch": 1.9945599999999999, - "grad_norm": 47.03620529174805, - "learning_rate": 6.044444444444445e-08, - "loss": 1.6054, - "step": 62330 - }, - { - "epoch": 1.99488, - "grad_norm": 47.1905632019043, - "learning_rate": 5.688888888888889e-08, - "loss": 1.591, - "step": 62340 - }, - { - 
"epoch": 1.9952, - "grad_norm": 45.94517517089844, - "learning_rate": 5.3333333333333334e-08, - "loss": 1.5694, - "step": 62350 - }, - { - "epoch": 1.99552, - "grad_norm": 47.92856979370117, - "learning_rate": 4.977777777777778e-08, - "loss": 1.565, - "step": 62360 - }, - { - "epoch": 1.9958399999999998, - "grad_norm": 45.670406341552734, - "learning_rate": 4.6222222222222224e-08, - "loss": 1.571, - "step": 62370 - }, - { - "epoch": 1.9961600000000002, - "grad_norm": 46.5335693359375, - "learning_rate": 4.266666666666667e-08, - "loss": 1.5581, - "step": 62380 - }, - { - "epoch": 1.99648, - "grad_norm": 48.223350524902344, - "learning_rate": 3.911111111111111e-08, - "loss": 1.6012, - "step": 62390 - }, - { - "epoch": 1.9968, - "grad_norm": 45.21166229248047, - "learning_rate": 3.555555555555556e-08, - "loss": 1.5798, - "step": 62400 - }, - { - "epoch": 1.99712, - "grad_norm": 45.345008850097656, - "learning_rate": 3.2e-08, - "loss": 1.5842, - "step": 62410 - }, - { - "epoch": 1.99744, - "grad_norm": 47.583457946777344, - "learning_rate": 2.8444444444444447e-08, - "loss": 1.5653, - "step": 62420 - }, - { - "epoch": 1.99776, - "grad_norm": 45.92597579956055, - "learning_rate": 2.488888888888889e-08, - "loss": 1.5627, - "step": 62430 - }, - { - "epoch": 1.9980799999999999, - "grad_norm": 44.429718017578125, - "learning_rate": 2.1333333333333336e-08, - "loss": 1.5901, - "step": 62440 - }, - { - "epoch": 1.9984, - "grad_norm": 46.34611511230469, - "learning_rate": 1.777777777777778e-08, - "loss": 1.5488, - "step": 62450 - }, - { - "epoch": 1.99872, - "grad_norm": 46.130760192871094, - "learning_rate": 1.4222222222222223e-08, - "loss": 1.5986, - "step": 62460 - }, - { - "epoch": 1.99904, - "grad_norm": 46.61756896972656, - "learning_rate": 1.0666666666666668e-08, - "loss": 1.5739, - "step": 62470 - }, - { - "epoch": 1.99936, - "grad_norm": 47.170257568359375, - "learning_rate": 7.111111111111112e-09, - "loss": 1.5921, - "step": 62480 - }, - { - "epoch": 1.9996800000000001, - "grad_norm": 48.82705307006836, - "learning_rate": 3.555555555555556e-09, - "loss": 1.5959, - "step": 62490 - }, - { - "epoch": 2.0, - "grad_norm": 45.47258758544922, - "learning_rate": 0.0, - "loss": 1.5795, - "step": 62500 - } - ], - "logging_steps": 10, - "max_steps": 62500, - "num_input_tokens_seen": 0, - "num_train_epochs": 2, - "save_steps": 100, - "stateful_callbacks": { - "TrainerControl": { - "args": { - "should_epoch_stop": false, - "should_evaluate": false, - "should_log": false, - "should_save": true, - "should_training_stop": true - }, - "attributes": {} - } - }, - "total_flos": 1.05551622144e+18, - "train_batch_size": 8, - "trial_name": null, - "trial_params": null -} +{ + "epoch": 1.0, + "global_step": 15617, + "max_steps": 15617, + "logging_steps": 5, + "eval_steps": 200, + "save_steps": 0, + "train_batch_size": 8, + "num_train_epochs": 1, + "num_input_tokens_seen": 0, + "total_flos": 5.2748789856731136e+17, + "log_history": [ + { + "loss": 2.6996, + "grad_norm": 2.458204984664917, + "learning_rate": 1.6005121638924457e-07, + "epoch": 0.0003201639239290517, + "step": 5 + }, + { + "loss": 2.6896, + "grad_norm": 2.7179408073425293, + "learning_rate": 3.2010243277848913e-07, + "epoch": 0.0006403278478581034, + "step": 10 + }, + { + "loss": 2.6812, + "grad_norm": 2.403977870941162, + "learning_rate": 4.801536491677337e-07, + "epoch": 0.0009604917717871551, + "step": 15 + }, + { + "loss": 2.6922, + "grad_norm": 2.4200124740600586, + "learning_rate": 6.402048655569783e-07, + "epoch": 0.0012806556957162068, + "step": 
20 + }, + { + "loss": 2.6932, + "grad_norm": 2.451019048690796, + "learning_rate": 8.002560819462229e-07, + "epoch": 0.0016008196196452584, + "step": 25 + }, + { + "loss": 2.6732, + "grad_norm": 2.2547831535339355, + "learning_rate": 9.603072983354673e-07, + "epoch": 0.0019209835435743102, + "step": 30 + }, + { + "loss": 2.7105, + "grad_norm": 2.2860751152038574, + "learning_rate": 1.120358514724712e-06, + "epoch": 0.0022411474675033617, + "step": 35 + }, + { + "loss": 2.6946, + "grad_norm": 2.1596930027008057, + "learning_rate": 1.2804097311139565e-06, + "epoch": 0.0025613113914324135, + "step": 40 + }, + { + "loss": 2.697, + "grad_norm": 2.220602512359619, + "learning_rate": 1.4404609475032012e-06, + "epoch": 0.002881475315361465, + "step": 45 + }, + { + "loss": 2.6972, + "grad_norm": 2.0607404708862305, + "learning_rate": 1.6005121638924457e-06, + "epoch": 0.0032016392392905167, + "step": 50 + }, + { + "loss": 2.6861, + "grad_norm": 2.138319492340088, + "learning_rate": 1.7605633802816904e-06, + "epoch": 0.0035218031632195685, + "step": 55 + }, + { + "loss": 2.6478, + "grad_norm": 2.244121789932251, + "learning_rate": 1.9206145966709347e-06, + "epoch": 0.0038419670871486203, + "step": 60 + }, + { + "loss": 2.6676, + "grad_norm": 2.0738816261291504, + "learning_rate": 2.0806658130601794e-06, + "epoch": 0.004162131011077672, + "step": 65 + }, + { + "loss": 2.6839, + "grad_norm": 2.0179197788238525, + "learning_rate": 2.240717029449424e-06, + "epoch": 0.0044822949350067235, + "step": 70 + }, + { + "loss": 2.6514, + "grad_norm": 1.970077633857727, + "learning_rate": 2.4007682458386688e-06, + "epoch": 0.004802458858935775, + "step": 75 + }, + { + "loss": 2.6685, + "grad_norm": 1.9740138053894043, + "learning_rate": 2.560819462227913e-06, + "epoch": 0.005122622782864827, + "step": 80 + }, + { + "loss": 2.6835, + "grad_norm": 2.0440573692321777, + "learning_rate": 2.7208706786171577e-06, + "epoch": 0.0054427867067938784, + "step": 85 + }, + { + "loss": 2.6463, + "grad_norm": 1.9804933071136475, + "learning_rate": 2.8809218950064024e-06, + "epoch": 0.00576295063072293, + "step": 90 + }, + { + "loss": 2.6683, + "grad_norm": 1.9438477754592896, + "learning_rate": 3.0409731113956467e-06, + "epoch": 0.006083114554651982, + "step": 95 + }, + { + "loss": 2.6281, + "grad_norm": 2.0071446895599365, + "learning_rate": 3.2010243277848914e-06, + "epoch": 0.006403278478581033, + "step": 100 + }, + { + "loss": 2.6636, + "grad_norm": 1.9696836471557617, + "learning_rate": 3.361075544174136e-06, + "epoch": 0.006723442402510085, + "step": 105 + }, + { + "loss": 2.6637, + "grad_norm": 2.0100696086883545, + "learning_rate": 3.521126760563381e-06, + "epoch": 0.007043606326439137, + "step": 110 + }, + { + "loss": 2.6533, + "grad_norm": 1.9581183195114136, + "learning_rate": 3.681177976952625e-06, + "epoch": 0.007363770250368188, + "step": 115 + }, + { + "loss": 2.6481, + "grad_norm": 1.921207070350647, + "learning_rate": 3.841229193341869e-06, + "epoch": 0.007683934174297241, + "step": 120 + }, + { + "loss": 2.6398, + "grad_norm": 1.9562214612960815, + "learning_rate": 4.001280409731114e-06, + "epoch": 0.008004098098226291, + "step": 125 + }, + { + "loss": 2.6512, + "grad_norm": 2.040691614151001, + "learning_rate": 4.161331626120359e-06, + "epoch": 0.008324262022155344, + "step": 130 + }, + { + "loss": 2.6453, + "grad_norm": 2.003002643585205, + "learning_rate": 4.321382842509603e-06, + "epoch": 0.008644425946084396, + "step": 135 + }, + { + "loss": 2.6442, + "grad_norm": 1.9360718727111816, + "learning_rate": 
4.481434058898848e-06, + "epoch": 0.008964589870013447, + "step": 140 + }, + { + "loss": 2.6393, + "grad_norm": 2.054797410964966, + "learning_rate": 4.641485275288092e-06, + "epoch": 0.009284753793942498, + "step": 145 + }, + { + "loss": 2.6241, + "grad_norm": 1.9937611818313599, + "learning_rate": 4.8015364916773375e-06, + "epoch": 0.00960491771787155, + "step": 150 + }, + { + "loss": 2.6592, + "grad_norm": 2.0823230743408203, + "learning_rate": 4.961587708066581e-06, + "epoch": 0.009925081641800603, + "step": 155 + }, + { + "loss": 2.6511, + "grad_norm": 2.1629271507263184, + "learning_rate": 5.121638924455826e-06, + "epoch": 0.010245245565729654, + "step": 160 + }, + { + "loss": 2.6291, + "grad_norm": 1.968585729598999, + "learning_rate": 5.28169014084507e-06, + "epoch": 0.010565409489658706, + "step": 165 + }, + { + "loss": 2.632, + "grad_norm": 2.0315310955047607, + "learning_rate": 5.4417413572343155e-06, + "epoch": 0.010885573413587757, + "step": 170 + }, + { + "loss": 2.6488, + "grad_norm": 2.0342774391174316, + "learning_rate": 5.60179257362356e-06, + "epoch": 0.011205737337516808, + "step": 175 + }, + { + "loss": 2.6346, + "grad_norm": 1.9805340766906738, + "learning_rate": 5.761843790012805e-06, + "epoch": 0.01152590126144586, + "step": 180 + }, + { + "loss": 2.6129, + "grad_norm": 2.0554709434509277, + "learning_rate": 5.921895006402049e-06, + "epoch": 0.011846065185374913, + "step": 185 + }, + { + "loss": 2.5925, + "grad_norm": 1.9576430320739746, + "learning_rate": 6.0819462227912934e-06, + "epoch": 0.012166229109303964, + "step": 190 + }, + { + "loss": 2.6392, + "grad_norm": 2.046264410018921, + "learning_rate": 6.241997439180538e-06, + "epoch": 0.012486393033233015, + "step": 195 + }, + { + "loss": 2.6122, + "grad_norm": 1.9459033012390137, + "learning_rate": 6.402048655569783e-06, + "epoch": 0.012806556957162067, + "step": 200 + }, + { + "eval_loss": 2.469697952270508, + "eval_runtime": 13.9916, + "eval_samples_per_second": 146.373, + "eval_steps_per_second": 18.297, + "epoch": 0.012806556957162067, + "step": 200 + }, + { + "loss": 2.6529, + "grad_norm": 1.9425408840179443, + "learning_rate": 6.562099871959026e-06, + "epoch": 0.013126720881091118, + "step": 205 + }, + { + "loss": 2.624, + "grad_norm": 1.9245175123214722, + "learning_rate": 6.722151088348272e-06, + "epoch": 0.01344688480502017, + "step": 210 + }, + { + "loss": 2.6275, + "grad_norm": 2.075512647628784, + "learning_rate": 6.882202304737516e-06, + "epoch": 0.013767048728949223, + "step": 215 + }, + { + "loss": 2.6339, + "grad_norm": 2.0762271881103516, + "learning_rate": 7.042253521126762e-06, + "epoch": 0.014087212652878274, + "step": 220 + }, + { + "loss": 2.615, + "grad_norm": 1.9588446617126465, + "learning_rate": 7.202304737516005e-06, + "epoch": 0.014407376576807325, + "step": 225 + }, + { + "loss": 2.615, + "grad_norm": 2.0142860412597656, + "learning_rate": 7.36235595390525e-06, + "epoch": 0.014727540500736377, + "step": 230 + }, + { + "loss": 2.6257, + "grad_norm": 2.0105693340301514, + "learning_rate": 7.5224071702944944e-06, + "epoch": 0.015047704424665428, + "step": 235 + }, + { + "loss": 2.6421, + "grad_norm": 2.0586354732513428, + "learning_rate": 7.682458386683739e-06, + "epoch": 0.015367868348594481, + "step": 240 + }, + { + "loss": 2.6181, + "grad_norm": 1.9821962118148804, + "learning_rate": 7.842509603072984e-06, + "epoch": 0.015688032272523533, + "step": 245 + }, + { + "loss": 2.593, + "grad_norm": 2.0167949199676514, + "learning_rate": 8.002560819462227e-06, + "epoch": 
0.016008196196452582, + "step": 250 + }, + { + "loss": 2.6102, + "grad_norm": 2.1412761211395264, + "learning_rate": 8.162612035851472e-06, + "epoch": 0.016328360120381635, + "step": 255 + }, + { + "loss": 2.6243, + "grad_norm": 1.926281213760376, + "learning_rate": 8.322663252240718e-06, + "epoch": 0.01664852404431069, + "step": 260 + }, + { + "loss": 2.6286, + "grad_norm": 1.9839813709259033, + "learning_rate": 8.482714468629963e-06, + "epoch": 0.016968687968239738, + "step": 265 + }, + { + "loss": 2.5883, + "grad_norm": 2.0237507820129395, + "learning_rate": 8.642765685019206e-06, + "epoch": 0.01728885189216879, + "step": 270 + }, + { + "loss": 2.6162, + "grad_norm": 1.937261939048767, + "learning_rate": 8.802816901408451e-06, + "epoch": 0.01760901581609784, + "step": 275 + }, + { + "loss": 2.6302, + "grad_norm": 2.0588278770446777, + "learning_rate": 8.962868117797696e-06, + "epoch": 0.017929179740026894, + "step": 280 + }, + { + "loss": 2.6054, + "grad_norm": 2.097682476043701, + "learning_rate": 9.12291933418694e-06, + "epoch": 0.018249343663955947, + "step": 285 + }, + { + "loss": 2.6023, + "grad_norm": 2.114814043045044, + "learning_rate": 9.282970550576185e-06, + "epoch": 0.018569507587884997, + "step": 290 + }, + { + "loss": 2.5885, + "grad_norm": 1.9583766460418701, + "learning_rate": 9.44302176696543e-06, + "epoch": 0.01888967151181405, + "step": 295 + }, + { + "loss": 2.6237, + "grad_norm": 2.047853708267212, + "learning_rate": 9.603072983354675e-06, + "epoch": 0.0192098354357431, + "step": 300 + }, + { + "loss": 2.5634, + "grad_norm": 2.0519795417785645, + "learning_rate": 9.763124199743919e-06, + "epoch": 0.019529999359672152, + "step": 305 + }, + { + "loss": 2.624, + "grad_norm": 2.057596445083618, + "learning_rate": 9.923175416133162e-06, + "epoch": 0.019850163283601206, + "step": 310 + }, + { + "loss": 2.6016, + "grad_norm": 2.060136079788208, + "learning_rate": 1.0083226632522407e-05, + "epoch": 0.020170327207530255, + "step": 315 + }, + { + "loss": 2.6114, + "grad_norm": 2.0268754959106445, + "learning_rate": 1.0243277848911652e-05, + "epoch": 0.02049049113145931, + "step": 320 + }, + { + "loss": 2.5783, + "grad_norm": 2.0680699348449707, + "learning_rate": 1.0403329065300897e-05, + "epoch": 0.020810655055388358, + "step": 325 + }, + { + "loss": 2.5616, + "grad_norm": 1.9991204738616943, + "learning_rate": 1.056338028169014e-05, + "epoch": 0.02113081897931741, + "step": 330 + }, + { + "loss": 2.6128, + "grad_norm": 2.09833025932312, + "learning_rate": 1.0723431498079386e-05, + "epoch": 0.02145098290324646, + "step": 335 + }, + { + "loss": 2.6025, + "grad_norm": 1.9758498668670654, + "learning_rate": 1.0883482714468631e-05, + "epoch": 0.021771146827175514, + "step": 340 + }, + { + "loss": 2.5805, + "grad_norm": 2.0436413288116455, + "learning_rate": 1.1043533930857874e-05, + "epoch": 0.022091310751104567, + "step": 345 + }, + { + "loss": 2.5944, + "grad_norm": 2.0317132472991943, + "learning_rate": 1.120358514724712e-05, + "epoch": 0.022411474675033616, + "step": 350 + }, + { + "loss": 2.5963, + "grad_norm": 2.199904680252075, + "learning_rate": 1.1363636363636365e-05, + "epoch": 0.02273163859896267, + "step": 355 + }, + { + "loss": 2.5799, + "grad_norm": 1.9787744283676147, + "learning_rate": 1.152368758002561e-05, + "epoch": 0.02305180252289172, + "step": 360 + }, + { + "loss": 2.6212, + "grad_norm": 1.890915870666504, + "learning_rate": 1.1683738796414853e-05, + "epoch": 0.023371966446820772, + "step": 365 + }, + { + "loss": 2.5704, + "grad_norm": 2.013899087905884, + 
"learning_rate": 1.1843790012804098e-05, + "epoch": 0.023692130370749825, + "step": 370 + }, + { + "loss": 2.6101, + "grad_norm": 2.0689878463745117, + "learning_rate": 1.2003841229193342e-05, + "epoch": 0.024012294294678875, + "step": 375 + }, + { + "loss": 2.599, + "grad_norm": 2.028207540512085, + "learning_rate": 1.2163892445582587e-05, + "epoch": 0.024332458218607928, + "step": 380 + }, + { + "loss": 2.6125, + "grad_norm": 1.9936445951461792, + "learning_rate": 1.2323943661971832e-05, + "epoch": 0.024652622142536978, + "step": 385 + }, + { + "loss": 2.5983, + "grad_norm": 2.112257719039917, + "learning_rate": 1.2483994878361075e-05, + "epoch": 0.02497278606646603, + "step": 390 + }, + { + "loss": 2.6074, + "grad_norm": 2.079145669937134, + "learning_rate": 1.264404609475032e-05, + "epoch": 0.025292949990395084, + "step": 395 + }, + { + "loss": 2.5697, + "grad_norm": 2.074415922164917, + "learning_rate": 1.2804097311139566e-05, + "epoch": 0.025613113914324134, + "step": 400 + }, + { + "eval_loss": 2.420924663543701, + "eval_runtime": 14.9165, + "eval_samples_per_second": 137.297, + "eval_steps_per_second": 17.162, + "epoch": 0.025613113914324134, + "step": 400 + }, + { + "loss": 2.5931, + "grad_norm": 1.9997014999389648, + "learning_rate": 1.296414852752881e-05, + "epoch": 0.025933277838253187, + "step": 405 + }, + { + "loss": 2.5684, + "grad_norm": 2.0531606674194336, + "learning_rate": 1.3124199743918053e-05, + "epoch": 0.026253441762182236, + "step": 410 + }, + { + "loss": 2.5935, + "grad_norm": 2.0724613666534424, + "learning_rate": 1.3284250960307298e-05, + "epoch": 0.02657360568611129, + "step": 415 + }, + { + "loss": 2.5658, + "grad_norm": 2.071101188659668, + "learning_rate": 1.3444302176696544e-05, + "epoch": 0.02689376961004034, + "step": 420 + }, + { + "loss": 2.5874, + "grad_norm": 1.9947307109832764, + "learning_rate": 1.360435339308579e-05, + "epoch": 0.027213933533969392, + "step": 425 + }, + { + "loss": 2.5713, + "grad_norm": 2.066715717315674, + "learning_rate": 1.3764404609475031e-05, + "epoch": 0.027534097457898445, + "step": 430 + }, + { + "loss": 2.5831, + "grad_norm": 2.0885956287384033, + "learning_rate": 1.3924455825864276e-05, + "epoch": 0.027854261381827495, + "step": 435 + }, + { + "loss": 2.5972, + "grad_norm": 2.0391199588775635, + "learning_rate": 1.4084507042253523e-05, + "epoch": 0.028174425305756548, + "step": 440 + }, + { + "loss": 2.5874, + "grad_norm": 1.9865524768829346, + "learning_rate": 1.4244558258642765e-05, + "epoch": 0.028494589229685598, + "step": 445 + }, + { + "loss": 2.5465, + "grad_norm": 2.0678815841674805, + "learning_rate": 1.440460947503201e-05, + "epoch": 0.02881475315361465, + "step": 450 + }, + { + "loss": 2.5946, + "grad_norm": 2.057670831680298, + "learning_rate": 1.4564660691421255e-05, + "epoch": 0.029134917077543704, + "step": 455 + }, + { + "loss": 2.6092, + "grad_norm": 2.101698875427246, + "learning_rate": 1.47247119078105e-05, + "epoch": 0.029455081001472753, + "step": 460 + }, + { + "loss": 2.5654, + "grad_norm": 2.0881927013397217, + "learning_rate": 1.4884763124199744e-05, + "epoch": 0.029775244925401807, + "step": 465 + }, + { + "loss": 2.5596, + "grad_norm": 2.091878890991211, + "learning_rate": 1.5044814340588989e-05, + "epoch": 0.030095408849330856, + "step": 470 + }, + { + "loss": 2.5909, + "grad_norm": 2.0245919227600098, + "learning_rate": 1.5204865556978234e-05, + "epoch": 0.03041557277325991, + "step": 475 + }, + { + "loss": 2.5902, + "grad_norm": 2.0498111248016357, + "learning_rate": 1.5364916773367477e-05, 
+ "epoch": 0.030735736697188962, + "step": 480 + }, + { + "loss": 2.5611, + "grad_norm": 2.0705480575561523, + "learning_rate": 1.5524967989756723e-05, + "epoch": 0.031055900621118012, + "step": 485 + }, + { + "loss": 2.5305, + "grad_norm": 2.0246481895446777, + "learning_rate": 1.5685019206145968e-05, + "epoch": 0.031376064545047065, + "step": 490 + }, + { + "loss": 2.5752, + "grad_norm": 1.9090466499328613, + "learning_rate": 1.5845070422535213e-05, + "epoch": 0.031696228468976115, + "step": 495 + }, + { + "loss": 2.5908, + "grad_norm": 2.044546604156494, + "learning_rate": 1.6005121638924455e-05, + "epoch": 0.032016392392905164, + "step": 500 + }, + { + "loss": 2.5695, + "grad_norm": 2.096444845199585, + "learning_rate": 1.61651728553137e-05, + "epoch": 0.03233655631683422, + "step": 505 + }, + { + "loss": 2.5942, + "grad_norm": 2.1051011085510254, + "learning_rate": 1.6325224071702945e-05, + "epoch": 0.03265672024076327, + "step": 510 + }, + { + "loss": 2.5629, + "grad_norm": 1.9321959018707275, + "learning_rate": 1.648527528809219e-05, + "epoch": 0.03297688416469232, + "step": 515 + }, + { + "loss": 2.5745, + "grad_norm": 2.038756847381592, + "learning_rate": 1.6645326504481435e-05, + "epoch": 0.03329704808862138, + "step": 520 + }, + { + "loss": 2.5618, + "grad_norm": 2.018385410308838, + "learning_rate": 1.680537772087068e-05, + "epoch": 0.033617212012550426, + "step": 525 + }, + { + "loss": 2.5884, + "grad_norm": 2.459459066390991, + "learning_rate": 1.6965428937259925e-05, + "epoch": 0.033937375936479476, + "step": 530 + }, + { + "loss": 2.5639, + "grad_norm": 1.96848726272583, + "learning_rate": 1.7125480153649167e-05, + "epoch": 0.034257539860408526, + "step": 535 + }, + { + "loss": 2.56, + "grad_norm": 2.0582144260406494, + "learning_rate": 1.7285531370038412e-05, + "epoch": 0.03457770378433758, + "step": 540 + }, + { + "loss": 2.5354, + "grad_norm": 2.1106033325195312, + "learning_rate": 1.7445582586427657e-05, + "epoch": 0.03489786770826663, + "step": 545 + }, + { + "loss": 2.5768, + "grad_norm": 2.137942314147949, + "learning_rate": 1.7605633802816902e-05, + "epoch": 0.03521803163219568, + "step": 550 + }, + { + "loss": 2.5491, + "grad_norm": 2.1352152824401855, + "learning_rate": 1.7765685019206147e-05, + "epoch": 0.03553819555612474, + "step": 555 + }, + { + "loss": 2.5545, + "grad_norm": 2.0633294582366943, + "learning_rate": 1.7925736235595393e-05, + "epoch": 0.03585835948005379, + "step": 560 + }, + { + "loss": 2.5579, + "grad_norm": 2.02809476852417, + "learning_rate": 1.8085787451984638e-05, + "epoch": 0.03617852340398284, + "step": 565 + }, + { + "loss": 2.5585, + "grad_norm": 2.1206002235412598, + "learning_rate": 1.824583866837388e-05, + "epoch": 0.036498687327911894, + "step": 570 + }, + { + "loss": 2.5356, + "grad_norm": 2.072930335998535, + "learning_rate": 1.8405889884763125e-05, + "epoch": 0.036818851251840944, + "step": 575 + }, + { + "loss": 2.5238, + "grad_norm": 1.958531379699707, + "learning_rate": 1.856594110115237e-05, + "epoch": 0.03713901517576999, + "step": 580 + }, + { + "loss": 2.5486, + "grad_norm": 2.0069925785064697, + "learning_rate": 1.872599231754161e-05, + "epoch": 0.03745917909969904, + "step": 585 + }, + { + "loss": 2.5567, + "grad_norm": 2.0546441078186035, + "learning_rate": 1.888604353393086e-05, + "epoch": 0.0377793430236281, + "step": 590 + }, + { + "loss": 2.5517, + "grad_norm": 2.076535940170288, + "learning_rate": 1.9046094750320105e-05, + "epoch": 0.03809950694755715, + "step": 595 + }, + { + "loss": 2.5513, + "grad_norm": 
2.17091703414917, + "learning_rate": 1.920614596670935e-05, + "epoch": 0.0384196708714862, + "step": 600 + }, + { + "eval_loss": 2.400339126586914, + "eval_runtime": 10.2049, + "eval_samples_per_second": 200.687, + "eval_steps_per_second": 25.086, + "epoch": 0.0384196708714862, + "step": 600 + }, + { + "loss": 2.5444, + "grad_norm": 1.9744805097579956, + "learning_rate": 1.9366197183098592e-05, + "epoch": 0.038739834795415255, + "step": 605 + }, + { + "loss": 2.5378, + "grad_norm": 1.9619749784469604, + "learning_rate": 1.9526248399487837e-05, + "epoch": 0.039059998719344305, + "step": 610 + }, + { + "loss": 2.5362, + "grad_norm": 2.023552656173706, + "learning_rate": 1.9686299615877082e-05, + "epoch": 0.039380162643273355, + "step": 615 + }, + { + "loss": 2.563, + "grad_norm": 2.148352861404419, + "learning_rate": 1.9846350832266324e-05, + "epoch": 0.03970032656720241, + "step": 620 + }, + { + "loss": 2.5317, + "grad_norm": 2.052964925765991, + "learning_rate": 2.000640204865557e-05, + "epoch": 0.04002049049113146, + "step": 625 + }, + { + "loss": 2.5463, + "grad_norm": 2.0874581336975098, + "learning_rate": 2.0166453265044814e-05, + "epoch": 0.04034065441506051, + "step": 630 + }, + { + "loss": 2.5432, + "grad_norm": 2.1276135444641113, + "learning_rate": 2.0326504481434063e-05, + "epoch": 0.04066081833898956, + "step": 635 + }, + { + "loss": 2.5441, + "grad_norm": 2.120331048965454, + "learning_rate": 2.0486555697823304e-05, + "epoch": 0.04098098226291862, + "step": 640 + }, + { + "loss": 2.5594, + "grad_norm": 1.9808118343353271, + "learning_rate": 2.064660691421255e-05, + "epoch": 0.041301146186847666, + "step": 645 + }, + { + "loss": 2.5496, + "grad_norm": 2.133842706680298, + "learning_rate": 2.0806658130601795e-05, + "epoch": 0.041621310110776716, + "step": 650 + }, + { + "loss": 2.5293, + "grad_norm": 2.060401439666748, + "learning_rate": 2.0966709346991036e-05, + "epoch": 0.04194147403470577, + "step": 655 + }, + { + "loss": 2.5548, + "grad_norm": 2.127145767211914, + "learning_rate": 2.112676056338028e-05, + "epoch": 0.04226163795863482, + "step": 660 + }, + { + "loss": 2.5335, + "grad_norm": 2.0574846267700195, + "learning_rate": 2.1286811779769527e-05, + "epoch": 0.04258180188256387, + "step": 665 + }, + { + "loss": 2.569, + "grad_norm": 2.057927131652832, + "learning_rate": 2.1446862996158772e-05, + "epoch": 0.04290196580649292, + "step": 670 + }, + { + "loss": 2.5401, + "grad_norm": 2.064457654953003, + "learning_rate": 2.1606914212548017e-05, + "epoch": 0.04322212973042198, + "step": 675 + }, + { + "loss": 2.5279, + "grad_norm": 2.1458826065063477, + "learning_rate": 2.1766965428937262e-05, + "epoch": 0.04354229365435103, + "step": 680 + }, + { + "loss": 2.5387, + "grad_norm": 1.951903223991394, + "learning_rate": 2.1927016645326507e-05, + "epoch": 0.04386245757828008, + "step": 685 + }, + { + "loss": 2.4947, + "grad_norm": 2.019552707672119, + "learning_rate": 2.208706786171575e-05, + "epoch": 0.044182621502209134, + "step": 690 + }, + { + "loss": 2.5334, + "grad_norm": 2.1926770210266113, + "learning_rate": 2.2247119078104994e-05, + "epoch": 0.04450278542613818, + "step": 695 + }, + { + "loss": 2.5333, + "grad_norm": 2.108576774597168, + "learning_rate": 2.240717029449424e-05, + "epoch": 0.04482294935006723, + "step": 700 + }, + { + "loss": 2.5361, + "grad_norm": 2.0758907794952393, + "learning_rate": 2.2567221510883484e-05, + "epoch": 0.04514311327399629, + "step": 705 + }, + { + "loss": 2.5382, + "grad_norm": 2.120901107788086, + "learning_rate": 2.272727272727273e-05, 
+ "epoch": 0.04546327719792534, + "step": 710 + }, + { + "loss": 2.5182, + "grad_norm": 2.098022699356079, + "learning_rate": 2.2887323943661974e-05, + "epoch": 0.04578344112185439, + "step": 715 + }, + { + "loss": 2.5062, + "grad_norm": 2.1501033306121826, + "learning_rate": 2.304737516005122e-05, + "epoch": 0.04610360504578344, + "step": 720 + }, + { + "loss": 2.5476, + "grad_norm": 2.16194748878479, + "learning_rate": 2.320742637644046e-05, + "epoch": 0.046423768969712495, + "step": 725 + }, + { + "loss": 2.5253, + "grad_norm": 2.200193405151367, + "learning_rate": 2.3367477592829706e-05, + "epoch": 0.046743932893641545, + "step": 730 + }, + { + "loss": 2.5261, + "grad_norm": 2.143402099609375, + "learning_rate": 2.352752880921895e-05, + "epoch": 0.047064096817570594, + "step": 735 + }, + { + "loss": 2.5514, + "grad_norm": 2.0588419437408447, + "learning_rate": 2.3687580025608197e-05, + "epoch": 0.04738426074149965, + "step": 740 + }, + { + "loss": 2.5248, + "grad_norm": 2.039383888244629, + "learning_rate": 2.384763124199744e-05, + "epoch": 0.0477044246654287, + "step": 745 + }, + { + "loss": 2.5464, + "grad_norm": 2.1919689178466797, + "learning_rate": 2.4007682458386683e-05, + "epoch": 0.04802458858935775, + "step": 750 + }, + { + "loss": 2.5142, + "grad_norm": 2.0702781677246094, + "learning_rate": 2.4167733674775932e-05, + "epoch": 0.0483447525132868, + "step": 755 + }, + { + "loss": 2.5399, + "grad_norm": 2.0351169109344482, + "learning_rate": 2.4327784891165174e-05, + "epoch": 0.048664916437215856, + "step": 760 + }, + { + "loss": 2.5392, + "grad_norm": 2.276874542236328, + "learning_rate": 2.448783610755442e-05, + "epoch": 0.048985080361144906, + "step": 765 + }, + { + "loss": 2.5221, + "grad_norm": 2.1543283462524414, + "learning_rate": 2.4647887323943664e-05, + "epoch": 0.049305244285073956, + "step": 770 + }, + { + "loss": 2.5136, + "grad_norm": 2.0453758239746094, + "learning_rate": 2.480793854033291e-05, + "epoch": 0.04962540820900301, + "step": 775 + }, + { + "loss": 2.5164, + "grad_norm": 2.0810751914978027, + "learning_rate": 2.496798975672215e-05, + "epoch": 0.04994557213293206, + "step": 780 + }, + { + "loss": 2.5324, + "grad_norm": 2.0781326293945312, + "learning_rate": 2.5128040973111393e-05, + "epoch": 0.05026573605686111, + "step": 785 + }, + { + "loss": 2.4946, + "grad_norm": 1.9677348136901855, + "learning_rate": 2.528809218950064e-05, + "epoch": 0.05058589998079017, + "step": 790 + }, + { + "loss": 2.5258, + "grad_norm": 2.114290475845337, + "learning_rate": 2.5448143405889886e-05, + "epoch": 0.05090606390471922, + "step": 795 + }, + { + "loss": 2.5322, + "grad_norm": 2.0765063762664795, + "learning_rate": 2.560819462227913e-05, + "epoch": 0.05122622782864827, + "step": 800 + }, + { + "eval_loss": 2.371161460876465, + "eval_runtime": 12.4134, + "eval_samples_per_second": 164.983, + "eval_steps_per_second": 20.623, + "epoch": 0.05122622782864827, + "step": 800 + }, + { + "loss": 2.5383, + "grad_norm": 2.069668769836426, + "learning_rate": 2.5768245838668376e-05, + "epoch": 0.05154639175257732, + "step": 805 + }, + { + "loss": 2.5004, + "grad_norm": 2.1110806465148926, + "learning_rate": 2.592829705505762e-05, + "epoch": 0.05186655567650637, + "step": 810 + }, + { + "loss": 2.5093, + "grad_norm": 2.0620675086975098, + "learning_rate": 2.6088348271446867e-05, + "epoch": 0.05218671960043542, + "step": 815 + }, + { + "loss": 2.4999, + "grad_norm": 1.9995859861373901, + "learning_rate": 2.6248399487836105e-05, + "epoch": 0.05250688352436447, + "step": 820 + }, + { + 
"loss": 2.5001, + "grad_norm": 2.050431489944458, + "learning_rate": 2.640845070422535e-05, + "epoch": 0.05282704744829353, + "step": 825 + }, + { + "loss": 2.5435, + "grad_norm": 2.142716646194458, + "learning_rate": 2.6568501920614595e-05, + "epoch": 0.05314721137222258, + "step": 830 + }, + { + "loss": 2.5513, + "grad_norm": 2.053705930709839, + "learning_rate": 2.6728553137003844e-05, + "epoch": 0.05346737529615163, + "step": 835 + }, + { + "loss": 2.5534, + "grad_norm": 1.9456514120101929, + "learning_rate": 2.688860435339309e-05, + "epoch": 0.05378753922008068, + "step": 840 + }, + { + "loss": 2.5351, + "grad_norm": 2.099213123321533, + "learning_rate": 2.7048655569782334e-05, + "epoch": 0.054107703144009735, + "step": 845 + }, + { + "loss": 2.5077, + "grad_norm": 2.0750980377197266, + "learning_rate": 2.720870678617158e-05, + "epoch": 0.054427867067938784, + "step": 850 + }, + { + "loss": 2.4956, + "grad_norm": 2.1118557453155518, + "learning_rate": 2.7368758002560817e-05, + "epoch": 0.054748030991867834, + "step": 855 + }, + { + "loss": 2.5411, + "grad_norm": 2.021570920944214, + "learning_rate": 2.7528809218950063e-05, + "epoch": 0.05506819491579689, + "step": 860 + }, + { + "loss": 2.5294, + "grad_norm": 2.095647096633911, + "learning_rate": 2.7688860435339308e-05, + "epoch": 0.05538835883972594, + "step": 865 + }, + { + "loss": 2.5224, + "grad_norm": 2.172281503677368, + "learning_rate": 2.7848911651728553e-05, + "epoch": 0.05570852276365499, + "step": 870 + }, + { + "loss": 2.51, + "grad_norm": 2.0782620906829834, + "learning_rate": 2.8008962868117798e-05, + "epoch": 0.056028686687584046, + "step": 875 + }, + { + "loss": 2.5158, + "grad_norm": 2.1138572692871094, + "learning_rate": 2.8169014084507046e-05, + "epoch": 0.056348850611513096, + "step": 880 + }, + { + "loss": 2.5186, + "grad_norm": 1.9800282716751099, + "learning_rate": 2.832906530089629e-05, + "epoch": 0.056669014535442146, + "step": 885 + }, + { + "loss": 2.4717, + "grad_norm": 2.043164014816284, + "learning_rate": 2.848911651728553e-05, + "epoch": 0.056989178459371195, + "step": 890 + }, + { + "loss": 2.5061, + "grad_norm": 2.0079712867736816, + "learning_rate": 2.8649167733674775e-05, + "epoch": 0.05730934238330025, + "step": 895 + }, + { + "loss": 2.4861, + "grad_norm": 2.0949175357818604, + "learning_rate": 2.880921895006402e-05, + "epoch": 0.0576295063072293, + "step": 900 + }, + { + "loss": 2.5082, + "grad_norm": 2.1614928245544434, + "learning_rate": 2.8969270166453265e-05, + "epoch": 0.05794967023115835, + "step": 905 + }, + { + "loss": 2.5003, + "grad_norm": 2.0658435821533203, + "learning_rate": 2.912932138284251e-05, + "epoch": 0.05826983415508741, + "step": 910 + }, + { + "loss": 2.5126, + "grad_norm": 2.056453227996826, + "learning_rate": 2.9289372599231756e-05, + "epoch": 0.05858999807901646, + "step": 915 + }, + { + "loss": 2.5079, + "grad_norm": 2.0968542098999023, + "learning_rate": 2.9449423815621e-05, + "epoch": 0.05891016200294551, + "step": 920 + }, + { + "loss": 2.5189, + "grad_norm": 2.0200068950653076, + "learning_rate": 2.9609475032010242e-05, + "epoch": 0.05923032592687456, + "step": 925 + }, + { + "loss": 2.531, + "grad_norm": 2.081430435180664, + "learning_rate": 2.9769526248399488e-05, + "epoch": 0.05955048985080361, + "step": 930 + }, + { + "loss": 2.4866, + "grad_norm": 2.0429458618164062, + "learning_rate": 2.9929577464788733e-05, + "epoch": 0.05987065377473266, + "step": 935 + }, + { + "loss": 2.51, + "grad_norm": 2.0854263305664062, + "learning_rate": 3.0089628681177978e-05, + 
"epoch": 0.06019081769866171, + "step": 940 + }, + { + "loss": 2.5245, + "grad_norm": 2.192448854446411, + "learning_rate": 3.0249679897567223e-05, + "epoch": 0.06051098162259077, + "step": 945 + }, + { + "loss": 2.504, + "grad_norm": 2.0920021533966064, + "learning_rate": 3.0409731113956468e-05, + "epoch": 0.06083114554651982, + "step": 950 + }, + { + "loss": 2.4903, + "grad_norm": 2.0515296459198, + "learning_rate": 3.056978233034571e-05, + "epoch": 0.06115130947044887, + "step": 955 + }, + { + "loss": 2.5079, + "grad_norm": 2.128044605255127, + "learning_rate": 3.0729833546734955e-05, + "epoch": 0.061471473394377925, + "step": 960 + }, + { + "loss": 2.4882, + "grad_norm": 2.0168440341949463, + "learning_rate": 3.0889884763124197e-05, + "epoch": 0.061791637318306974, + "step": 965 + }, + { + "loss": 2.5079, + "grad_norm": 2.090272903442383, + "learning_rate": 3.1049935979513445e-05, + "epoch": 0.062111801242236024, + "step": 970 + }, + { + "loss": 2.5153, + "grad_norm": 2.0438127517700195, + "learning_rate": 3.120998719590269e-05, + "epoch": 0.062431965166165074, + "step": 975 + }, + { + "loss": 2.4814, + "grad_norm": 2.051866292953491, + "learning_rate": 3.1370038412291935e-05, + "epoch": 0.06275212909009413, + "step": 980 + }, + { + "loss": 2.5105, + "grad_norm": 2.1288902759552, + "learning_rate": 3.1530089628681184e-05, + "epoch": 0.06307229301402317, + "step": 985 + }, + { + "loss": 2.5006, + "grad_norm": 2.1590287685394287, + "learning_rate": 3.1690140845070426e-05, + "epoch": 0.06339245693795223, + "step": 990 + }, + { + "loss": 2.5005, + "grad_norm": 2.182297945022583, + "learning_rate": 3.185019206145967e-05, + "epoch": 0.06371262086188129, + "step": 995 + }, + { + "loss": 2.5104, + "grad_norm": 1.9559736251831055, + "learning_rate": 3.201024327784891e-05, + "epoch": 0.06403278478581033, + "step": 1000 + }, + { + "eval_loss": 2.3531105518341064, + "eval_runtime": 14.4683, + "eval_samples_per_second": 141.551, + "eval_steps_per_second": 17.694, + "epoch": 0.06403278478581033, + "step": 1000 + }, + { + "loss": 2.4771, + "grad_norm": 2.445621967315674, + "learning_rate": 3.217029449423816e-05, + "epoch": 0.06435294870973939, + "step": 1005 + }, + { + "loss": 2.4956, + "grad_norm": 2.1934397220611572, + "learning_rate": 3.23303457106274e-05, + "epoch": 0.06467311263366844, + "step": 1010 + }, + { + "loss": 2.4993, + "grad_norm": 2.0829265117645264, + "learning_rate": 3.249039692701665e-05, + "epoch": 0.06499327655759748, + "step": 1015 + }, + { + "loss": 2.5011, + "grad_norm": 2.163093328475952, + "learning_rate": 3.265044814340589e-05, + "epoch": 0.06531344048152654, + "step": 1020 + }, + { + "loss": 2.4802, + "grad_norm": 2.2062385082244873, + "learning_rate": 3.281049935979514e-05, + "epoch": 0.0656336044054556, + "step": 1025 + }, + { + "loss": 2.5182, + "grad_norm": 2.199197769165039, + "learning_rate": 3.297055057618438e-05, + "epoch": 0.06595376832938464, + "step": 1030 + }, + { + "loss": 2.4656, + "grad_norm": 2.0707991123199463, + "learning_rate": 3.313060179257362e-05, + "epoch": 0.0662739322533137, + "step": 1035 + }, + { + "loss": 2.5002, + "grad_norm": 2.182140588760376, + "learning_rate": 3.329065300896287e-05, + "epoch": 0.06659409617724275, + "step": 1040 + }, + { + "loss": 2.5177, + "grad_norm": 1.9117063283920288, + "learning_rate": 3.345070422535211e-05, + "epoch": 0.0669142601011718, + "step": 1045 + }, + { + "loss": 2.4955, + "grad_norm": 2.077578067779541, + "learning_rate": 3.361075544174136e-05, + "epoch": 0.06723442402510085, + "step": 1050 + }, + { + 
"loss": 2.5032, + "grad_norm": 2.046825408935547, + "learning_rate": 3.37708066581306e-05, + "epoch": 0.06755458794902991, + "step": 1055 + }, + { + "loss": 2.51, + "grad_norm": 2.127065420150757, + "learning_rate": 3.393085787451985e-05, + "epoch": 0.06787475187295895, + "step": 1060 + }, + { + "loss": 2.4983, + "grad_norm": 2.076838493347168, + "learning_rate": 3.409090909090909e-05, + "epoch": 0.06819491579688801, + "step": 1065 + }, + { + "loss": 2.4759, + "grad_norm": 1.964388370513916, + "learning_rate": 3.4250960307298334e-05, + "epoch": 0.06851507972081705, + "step": 1070 + }, + { + "loss": 2.4878, + "grad_norm": 2.0563740730285645, + "learning_rate": 3.441101152368758e-05, + "epoch": 0.06883524364474611, + "step": 1075 + }, + { + "loss": 2.5031, + "grad_norm": 2.236644744873047, + "learning_rate": 3.4571062740076824e-05, + "epoch": 0.06915540756867516, + "step": 1080 + }, + { + "loss": 2.5151, + "grad_norm": 2.1143178939819336, + "learning_rate": 3.473111395646607e-05, + "epoch": 0.06947557149260421, + "step": 1085 + }, + { + "loss": 2.4976, + "grad_norm": 2.1225712299346924, + "learning_rate": 3.4891165172855314e-05, + "epoch": 0.06979573541653326, + "step": 1090 + }, + { + "loss": 2.4847, + "grad_norm": 2.148134231567383, + "learning_rate": 3.505121638924456e-05, + "epoch": 0.07011589934046232, + "step": 1095 + }, + { + "loss": 2.5044, + "grad_norm": 2.070889949798584, + "learning_rate": 3.5211267605633805e-05, + "epoch": 0.07043606326439136, + "step": 1100 + }, + { + "loss": 2.4932, + "grad_norm": 2.0513927936553955, + "learning_rate": 3.5371318822023046e-05, + "epoch": 0.07075622718832042, + "step": 1105 + }, + { + "loss": 2.4801, + "grad_norm": 2.2668299674987793, + "learning_rate": 3.5531370038412295e-05, + "epoch": 0.07107639111224948, + "step": 1110 + }, + { + "loss": 2.4892, + "grad_norm": 2.1977014541625977, + "learning_rate": 3.569142125480154e-05, + "epoch": 0.07139655503617852, + "step": 1115 + }, + { + "loss": 2.4956, + "grad_norm": 2.03102707862854, + "learning_rate": 3.5851472471190785e-05, + "epoch": 0.07171671896010758, + "step": 1120 + }, + { + "loss": 2.4646, + "grad_norm": 2.058772325515747, + "learning_rate": 3.601152368758003e-05, + "epoch": 0.07203688288403663, + "step": 1125 + }, + { + "loss": 2.4888, + "grad_norm": 2.179579019546509, + "learning_rate": 3.6171574903969275e-05, + "epoch": 0.07235704680796567, + "step": 1130 + }, + { + "loss": 2.4983, + "grad_norm": 2.1978416442871094, + "learning_rate": 3.633162612035852e-05, + "epoch": 0.07267721073189473, + "step": 1135 + }, + { + "loss": 2.4855, + "grad_norm": 1.9466743469238281, + "learning_rate": 3.649167733674776e-05, + "epoch": 0.07299737465582379, + "step": 1140 + }, + { + "loss": 2.4661, + "grad_norm": 2.139958143234253, + "learning_rate": 3.665172855313701e-05, + "epoch": 0.07331753857975283, + "step": 1145 + }, + { + "loss": 2.4822, + "grad_norm": 2.0397911071777344, + "learning_rate": 3.681177976952625e-05, + "epoch": 0.07363770250368189, + "step": 1150 + }, + { + "loss": 2.5217, + "grad_norm": 2.008603811264038, + "learning_rate": 3.69718309859155e-05, + "epoch": 0.07395786642761093, + "step": 1155 + }, + { + "loss": 2.4926, + "grad_norm": 2.2945339679718018, + "learning_rate": 3.713188220230474e-05, + "epoch": 0.07427803035153999, + "step": 1160 + }, + { + "loss": 2.5163, + "grad_norm": 2.1897239685058594, + "learning_rate": 3.729193341869399e-05, + "epoch": 0.07459819427546904, + "step": 1165 + }, + { + "loss": 2.4827, + "grad_norm": 2.0864007472991943, + "learning_rate": 
3.745198463508322e-05, + "epoch": 0.07491835819939809, + "step": 1170 + }, + { + "loss": 2.4885, + "grad_norm": 2.1951565742492676, + "learning_rate": 3.761203585147247e-05, + "epoch": 0.07523852212332714, + "step": 1175 + }, + { + "loss": 2.4403, + "grad_norm": 2.176609516143799, + "learning_rate": 3.777208706786172e-05, + "epoch": 0.0755586860472562, + "step": 1180 + }, + { + "loss": 2.4691, + "grad_norm": 2.2460696697235107, + "learning_rate": 3.793213828425096e-05, + "epoch": 0.07587884997118524, + "step": 1185 + }, + { + "loss": 2.4858, + "grad_norm": 2.059447765350342, + "learning_rate": 3.809218950064021e-05, + "epoch": 0.0761990138951143, + "step": 1190 + }, + { + "loss": 2.4878, + "grad_norm": 2.062699794769287, + "learning_rate": 3.825224071702945e-05, + "epoch": 0.07651917781904335, + "step": 1195 + }, + { + "loss": 2.483, + "grad_norm": 2.070650815963745, + "learning_rate": 3.84122919334187e-05, + "epoch": 0.0768393417429724, + "step": 1200 + }, + { + "eval_loss": 2.3345463275909424, + "eval_runtime": 11.3977, + "eval_samples_per_second": 179.686, + "eval_steps_per_second": 22.461, + "epoch": 0.0768393417429724, + "step": 1200 + }, + { + "loss": 2.4838, + "grad_norm": 2.119915723800659, + "learning_rate": 3.8572343149807935e-05, + "epoch": 0.07715950566690145, + "step": 1205 + }, + { + "loss": 2.4591, + "grad_norm": 2.0714828968048096, + "learning_rate": 3.8732394366197184e-05, + "epoch": 0.07747966959083051, + "step": 1210 + }, + { + "loss": 2.472, + "grad_norm": 2.186169385910034, + "learning_rate": 3.8892445582586426e-05, + "epoch": 0.07779983351475955, + "step": 1215 + }, + { + "loss": 2.4773, + "grad_norm": 2.181817054748535, + "learning_rate": 3.9052496798975674e-05, + "epoch": 0.07811999743868861, + "step": 1220 + }, + { + "loss": 2.4565, + "grad_norm": 2.1009023189544678, + "learning_rate": 3.921254801536492e-05, + "epoch": 0.07844016136261767, + "step": 1225 + }, + { + "loss": 2.4827, + "grad_norm": 1.9727615118026733, + "learning_rate": 3.9372599231754164e-05, + "epoch": 0.07876032528654671, + "step": 1230 + }, + { + "loss": 2.494, + "grad_norm": 2.114440679550171, + "learning_rate": 3.953265044814341e-05, + "epoch": 0.07908048921047577, + "step": 1235 + }, + { + "loss": 2.4776, + "grad_norm": 2.6610660552978516, + "learning_rate": 3.969270166453265e-05, + "epoch": 0.07940065313440482, + "step": 1240 + }, + { + "loss": 2.4643, + "grad_norm": 2.1695549488067627, + "learning_rate": 3.9852752880921896e-05, + "epoch": 0.07972081705833386, + "step": 1245 + }, + { + "loss": 2.4708, + "grad_norm": 2.2169156074523926, + "learning_rate": 4.001280409731114e-05, + "epoch": 0.08004098098226292, + "step": 1250 + }, + { + "loss": 2.4964, + "grad_norm": 2.0154330730438232, + "learning_rate": 4.0172855313700387e-05, + "epoch": 0.08036114490619196, + "step": 1255 + }, + { + "loss": 2.4564, + "grad_norm": 2.2851929664611816, + "learning_rate": 4.033290653008963e-05, + "epoch": 0.08068130883012102, + "step": 1260 + }, + { + "loss": 2.4764, + "grad_norm": 2.229935646057129, + "learning_rate": 4.049295774647888e-05, + "epoch": 0.08100147275405008, + "step": 1265 + }, + { + "loss": 2.4936, + "grad_norm": 2.0593361854553223, + "learning_rate": 4.0653008962868125e-05, + "epoch": 0.08132163667797912, + "step": 1270 + }, + { + "loss": 2.4756, + "grad_norm": 1.9973433017730713, + "learning_rate": 4.081306017925736e-05, + "epoch": 0.08164180060190818, + "step": 1275 + }, + { + "loss": 2.4594, + "grad_norm": 2.005742073059082, + "learning_rate": 4.097311139564661e-05, + "epoch": 
0.08196196452583723, + "step": 1280 + }, + { + "loss": 2.4543, + "grad_norm": 2.015453577041626, + "learning_rate": 4.113316261203585e-05, + "epoch": 0.08228212844976628, + "step": 1285 + }, + { + "loss": 2.4474, + "grad_norm": 2.0268640518188477, + "learning_rate": 4.12932138284251e-05, + "epoch": 0.08260229237369533, + "step": 1290 + }, + { + "loss": 2.4646, + "grad_norm": 2.077282428741455, + "learning_rate": 4.145326504481434e-05, + "epoch": 0.08292245629762439, + "step": 1295 + }, + { + "loss": 2.4894, + "grad_norm": 2.0141093730926514, + "learning_rate": 4.161331626120359e-05, + "epoch": 0.08324262022155343, + "step": 1300 + }, + { + "loss": 2.46, + "grad_norm": 2.0732734203338623, + "learning_rate": 4.177336747759283e-05, + "epoch": 0.08356278414548249, + "step": 1305 + }, + { + "loss": 2.4604, + "grad_norm": 2.012782573699951, + "learning_rate": 4.193341869398207e-05, + "epoch": 0.08388294806941154, + "step": 1310 + }, + { + "loss": 2.439, + "grad_norm": 2.024240255355835, + "learning_rate": 4.209346991037132e-05, + "epoch": 0.08420311199334059, + "step": 1315 + }, + { + "loss": 2.4635, + "grad_norm": 2.0818872451782227, + "learning_rate": 4.225352112676056e-05, + "epoch": 0.08452327591726964, + "step": 1320 + }, + { + "loss": 2.4801, + "grad_norm": 2.048849105834961, + "learning_rate": 4.241357234314981e-05, + "epoch": 0.0848434398411987, + "step": 1325 + }, + { + "loss": 2.4625, + "grad_norm": 2.284207344055176, + "learning_rate": 4.257362355953905e-05, + "epoch": 0.08516360376512774, + "step": 1330 + }, + { + "loss": 2.4771, + "grad_norm": 2.018928050994873, + "learning_rate": 4.27336747759283e-05, + "epoch": 0.0854837676890568, + "step": 1335 + }, + { + "loss": 2.4864, + "grad_norm": 2.1332316398620605, + "learning_rate": 4.2893725992317543e-05, + "epoch": 0.08580393161298584, + "step": 1340 + }, + { + "loss": 2.4726, + "grad_norm": 2.0433480739593506, + "learning_rate": 4.3053777208706785e-05, + "epoch": 0.0861240955369149, + "step": 1345 + }, + { + "loss": 2.4597, + "grad_norm": 2.058560609817505, + "learning_rate": 4.3213828425096034e-05, + "epoch": 0.08644425946084396, + "step": 1350 + }, + { + "loss": 2.4773, + "grad_norm": 2.096250534057617, + "learning_rate": 4.3373879641485275e-05, + "epoch": 0.086764423384773, + "step": 1355 + }, + { + "loss": 2.4619, + "grad_norm": 2.168686866760254, + "learning_rate": 4.3533930857874524e-05, + "epoch": 0.08708458730870205, + "step": 1360 + }, + { + "loss": 2.4256, + "grad_norm": 2.0486621856689453, + "learning_rate": 4.3693982074263766e-05, + "epoch": 0.08740475123263111, + "step": 1365 + }, + { + "loss": 2.4488, + "grad_norm": 2.1706786155700684, + "learning_rate": 4.3854033290653014e-05, + "epoch": 0.08772491515656015, + "step": 1370 + }, + { + "loss": 2.4556, + "grad_norm": 1.9638718366622925, + "learning_rate": 4.4014084507042256e-05, + "epoch": 0.08804507908048921, + "step": 1375 + }, + { + "loss": 2.4687, + "grad_norm": 2.0920019149780273, + "learning_rate": 4.41741357234315e-05, + "epoch": 0.08836524300441827, + "step": 1380 + }, + { + "loss": 2.4431, + "grad_norm": 2.1053900718688965, + "learning_rate": 4.4334186939820746e-05, + "epoch": 0.08868540692834731, + "step": 1385 + }, + { + "loss": 2.4907, + "grad_norm": 2.1533970832824707, + "learning_rate": 4.449423815620999e-05, + "epoch": 0.08900557085227637, + "step": 1390 + }, + { + "loss": 2.4612, + "grad_norm": 2.0936789512634277, + "learning_rate": 4.4654289372599236e-05, + "epoch": 0.08932573477620542, + "step": 1395 + }, + { + "loss": 2.4923, + "grad_norm": 
2.1903157234191895, + "learning_rate": 4.481434058898848e-05, + "epoch": 0.08964589870013447, + "step": 1400 + }, + { + "eval_loss": 2.323389768600464, + "eval_runtime": 12.3833, + "eval_samples_per_second": 165.383, + "eval_steps_per_second": 20.673, + "epoch": 0.08964589870013447, + "step": 1400 + }, + { + "loss": 2.4615, + "grad_norm": 2.1091010570526123, + "learning_rate": 4.4974391805377727e-05, + "epoch": 0.08996606262406352, + "step": 1405 + }, + { + "loss": 2.4428, + "grad_norm": 2.227038621902466, + "learning_rate": 4.513444302176697e-05, + "epoch": 0.09028622654799258, + "step": 1410 + }, + { + "loss": 2.4297, + "grad_norm": 2.059403419494629, + "learning_rate": 4.529449423815621e-05, + "epoch": 0.09060639047192162, + "step": 1415 + }, + { + "loss": 2.4364, + "grad_norm": 2.005385398864746, + "learning_rate": 4.545454545454546e-05, + "epoch": 0.09092655439585068, + "step": 1420 + }, + { + "loss": 2.4764, + "grad_norm": 2.142878532409668, + "learning_rate": 4.56145966709347e-05, + "epoch": 0.09124671831977972, + "step": 1425 + }, + { + "loss": 2.4698, + "grad_norm": 2.005213499069214, + "learning_rate": 4.577464788732395e-05, + "epoch": 0.09156688224370878, + "step": 1430 + }, + { + "loss": 2.4566, + "grad_norm": 2.094695568084717, + "learning_rate": 4.593469910371319e-05, + "epoch": 0.09188704616763783, + "step": 1435 + }, + { + "loss": 2.4363, + "grad_norm": 2.0781939029693604, + "learning_rate": 4.609475032010244e-05, + "epoch": 0.09220721009156688, + "step": 1440 + }, + { + "loss": 2.4561, + "grad_norm": 2.1999306678771973, + "learning_rate": 4.625480153649168e-05, + "epoch": 0.09252737401549593, + "step": 1445 + }, + { + "loss": 2.456, + "grad_norm": 2.0999979972839355, + "learning_rate": 4.641485275288092e-05, + "epoch": 0.09284753793942499, + "step": 1450 + }, + { + "loss": 2.4783, + "grad_norm": 2.1072137355804443, + "learning_rate": 4.6574903969270164e-05, + "epoch": 0.09316770186335403, + "step": 1455 + }, + { + "loss": 2.4956, + "grad_norm": 1.932655692100525, + "learning_rate": 4.673495518565941e-05, + "epoch": 0.09348786578728309, + "step": 1460 + }, + { + "loss": 2.49, + "grad_norm": 2.29823637008667, + "learning_rate": 4.689500640204866e-05, + "epoch": 0.09380802971121215, + "step": 1465 + }, + { + "loss": 2.4669, + "grad_norm": 2.6139848232269287, + "learning_rate": 4.70550576184379e-05, + "epoch": 0.09412819363514119, + "step": 1470 + }, + { + "loss": 2.4432, + "grad_norm": 2.2109243869781494, + "learning_rate": 4.721510883482715e-05, + "epoch": 0.09444835755907025, + "step": 1475 + }, + { + "loss": 2.4578, + "grad_norm": 2.114405870437622, + "learning_rate": 4.737516005121639e-05, + "epoch": 0.0947685214829993, + "step": 1480 + }, + { + "loss": 2.4617, + "grad_norm": 2.0485222339630127, + "learning_rate": 4.7535211267605635e-05, + "epoch": 0.09508868540692834, + "step": 1485 + }, + { + "loss": 2.4296, + "grad_norm": 2.0749807357788086, + "learning_rate": 4.769526248399488e-05, + "epoch": 0.0954088493308574, + "step": 1490 + }, + { + "loss": 2.4569, + "grad_norm": 2.2009363174438477, + "learning_rate": 4.7855313700384125e-05, + "epoch": 0.09572901325478646, + "step": 1495 + }, + { + "loss": 2.4203, + "grad_norm": 2.4653289318084717, + "learning_rate": 4.801536491677337e-05, + "epoch": 0.0960491771787155, + "step": 1500 + }, + { + "loss": 2.445, + "grad_norm": 2.145634889602661, + "learning_rate": 4.8175416133162615e-05, + "epoch": 0.09636934110264456, + "step": 1505 + }, + { + "loss": 2.4232, + "grad_norm": 2.214996337890625, + "learning_rate": 
4.8335467349551864e-05, + "epoch": 0.0966895050265736, + "step": 1510 + }, + { + "loss": 2.5058, + "grad_norm": 2.039727210998535, + "learning_rate": 4.8495518565941106e-05, + "epoch": 0.09700966895050266, + "step": 1515 + }, + { + "loss": 2.4503, + "grad_norm": 2.134812593460083, + "learning_rate": 4.865556978233035e-05, + "epoch": 0.09732983287443171, + "step": 1520 + }, + { + "loss": 2.4561, + "grad_norm": 2.0128939151763916, + "learning_rate": 4.881562099871959e-05, + "epoch": 0.09764999679836076, + "step": 1525 + }, + { + "loss": 2.4869, + "grad_norm": 1.9132862091064453, + "learning_rate": 4.897567221510884e-05, + "epoch": 0.09797016072228981, + "step": 1530 + }, + { + "loss": 2.4637, + "grad_norm": 2.2746827602386475, + "learning_rate": 4.913572343149808e-05, + "epoch": 0.09829032464621887, + "step": 1535 + }, + { + "loss": 2.4481, + "grad_norm": 2.09806489944458, + "learning_rate": 4.929577464788733e-05, + "epoch": 0.09861048857014791, + "step": 1540 + }, + { + "loss": 2.4761, + "grad_norm": 2.1433379650115967, + "learning_rate": 4.945582586427657e-05, + "epoch": 0.09893065249407697, + "step": 1545 + }, + { + "loss": 2.4404, + "grad_norm": 2.127873659133911, + "learning_rate": 4.961587708066582e-05, + "epoch": 0.09925081641800602, + "step": 1550 + }, + { + "loss": 2.4531, + "grad_norm": 2.067396879196167, + "learning_rate": 4.977592829705506e-05, + "epoch": 0.09957098034193507, + "step": 1555 + }, + { + "loss": 2.4531, + "grad_norm": 2.0335302352905273, + "learning_rate": 4.99359795134443e-05, + "epoch": 0.09989114426586412, + "step": 1560 + }, + { + "loss": 2.456, + "grad_norm": 2.0103564262390137, + "learning_rate": 5e-05, + "epoch": 0.10021130818979318, + "step": 1565 + }, + { + "loss": 2.4837, + "grad_norm": 1.9280204772949219, + "learning_rate": 5e-05, + "epoch": 0.10053147211372222, + "step": 1570 + }, + { + "loss": 2.4348, + "grad_norm": 2.0677709579467773, + "learning_rate": 5e-05, + "epoch": 0.10085163603765128, + "step": 1575 + }, + { + "loss": 2.4423, + "grad_norm": 2.088454484939575, + "learning_rate": 5e-05, + "epoch": 0.10117179996158034, + "step": 1580 + }, + { + "loss": 2.4472, + "grad_norm": 2.0001513957977295, + "learning_rate": 5e-05, + "epoch": 0.10149196388550938, + "step": 1585 + }, + { + "loss": 2.4643, + "grad_norm": 2.0158650875091553, + "learning_rate": 5e-05, + "epoch": 0.10181212780943844, + "step": 1590 + }, + { + "loss": 2.4517, + "grad_norm": 2.062638282775879, + "learning_rate": 5e-05, + "epoch": 0.10213229173336748, + "step": 1595 + }, + { + "loss": 2.4274, + "grad_norm": 2.11297345161438, + "learning_rate": 5e-05, + "epoch": 0.10245245565729653, + "step": 1600 + }, + { + "eval_loss": 2.3070931434631348, + "eval_runtime": 9.4351, + "eval_samples_per_second": 217.062, + "eval_steps_per_second": 27.133, + "epoch": 0.10245245565729653, + "step": 1600 + }, + { + "loss": 2.4551, + "grad_norm": 2.169626235961914, + "learning_rate": 5e-05, + "epoch": 0.10277261958122559, + "step": 1605 + }, + { + "loss": 2.4464, + "grad_norm": 2.102466344833374, + "learning_rate": 5e-05, + "epoch": 0.10309278350515463, + "step": 1610 + }, + { + "loss": 2.438, + "grad_norm": 1.9966940879821777, + "learning_rate": 5e-05, + "epoch": 0.10341294742908369, + "step": 1615 + }, + { + "loss": 2.4323, + "grad_norm": 2.103325605392456, + "learning_rate": 5e-05, + "epoch": 0.10373311135301275, + "step": 1620 + }, + { + "loss": 2.4293, + "grad_norm": 2.05993390083313, + "learning_rate": 5e-05, + "epoch": 0.10405327527694179, + "step": 1625 + }, + { + "loss": 2.428, + "grad_norm": 
1.9764646291732788, + "learning_rate": 5e-05, + "epoch": 0.10437343920087085, + "step": 1630 + }, + { + "loss": 2.4515, + "grad_norm": 1.9260586500167847, + "learning_rate": 5e-05, + "epoch": 0.1046936031247999, + "step": 1635 + }, + { + "loss": 2.4378, + "grad_norm": 1.9698050022125244, + "learning_rate": 5e-05, + "epoch": 0.10501376704872895, + "step": 1640 + }, + { + "loss": 2.4413, + "grad_norm": 2.1451985836029053, + "learning_rate": 5e-05, + "epoch": 0.105333930972658, + "step": 1645 + }, + { + "loss": 2.4739, + "grad_norm": 2.0343995094299316, + "learning_rate": 5e-05, + "epoch": 0.10565409489658706, + "step": 1650 + }, + { + "loss": 2.4383, + "grad_norm": 2.035264253616333, + "learning_rate": 5e-05, + "epoch": 0.1059742588205161, + "step": 1655 + }, + { + "loss": 2.412, + "grad_norm": 1.9604747295379639, + "learning_rate": 5e-05, + "epoch": 0.10629442274444516, + "step": 1660 + }, + { + "loss": 2.4449, + "grad_norm": 2.0956430435180664, + "learning_rate": 5e-05, + "epoch": 0.10661458666837421, + "step": 1665 + }, + { + "loss": 2.4662, + "grad_norm": 2.05611252784729, + "learning_rate": 5e-05, + "epoch": 0.10693475059230326, + "step": 1670 + }, + { + "loss": 2.4423, + "grad_norm": 2.157836437225342, + "learning_rate": 5e-05, + "epoch": 0.10725491451623231, + "step": 1675 + }, + { + "loss": 2.4285, + "grad_norm": 1.9412627220153809, + "learning_rate": 5e-05, + "epoch": 0.10757507844016136, + "step": 1680 + }, + { + "loss": 2.4448, + "grad_norm": 2.1207661628723145, + "learning_rate": 5e-05, + "epoch": 0.10789524236409041, + "step": 1685 + }, + { + "loss": 2.4737, + "grad_norm": 2.0780351161956787, + "learning_rate": 5e-05, + "epoch": 0.10821540628801947, + "step": 1690 + }, + { + "loss": 2.4195, + "grad_norm": 1.9629524946212769, + "learning_rate": 5e-05, + "epoch": 0.10853557021194851, + "step": 1695 + }, + { + "loss": 2.4542, + "grad_norm": 2.141195774078369, + "learning_rate": 5e-05, + "epoch": 0.10885573413587757, + "step": 1700 + }, + { + "loss": 2.4617, + "grad_norm": 2.042581081390381, + "learning_rate": 5e-05, + "epoch": 0.10917589805980663, + "step": 1705 + }, + { + "loss": 2.4604, + "grad_norm": 2.0919344425201416, + "learning_rate": 5e-05, + "epoch": 0.10949606198373567, + "step": 1710 + }, + { + "loss": 2.438, + "grad_norm": 2.0356478691101074, + "learning_rate": 5e-05, + "epoch": 0.10981622590766472, + "step": 1715 + }, + { + "loss": 2.4202, + "grad_norm": 1.9953988790512085, + "learning_rate": 5e-05, + "epoch": 0.11013638983159378, + "step": 1720 + }, + { + "loss": 2.4372, + "grad_norm": 2.033454656600952, + "learning_rate": 5e-05, + "epoch": 0.11045655375552282, + "step": 1725 + }, + { + "loss": 2.4564, + "grad_norm": 1.898619532585144, + "learning_rate": 5e-05, + "epoch": 0.11077671767945188, + "step": 1730 + }, + { + "loss": 2.4104, + "grad_norm": 1.9848005771636963, + "learning_rate": 5e-05, + "epoch": 0.11109688160338094, + "step": 1735 + }, + { + "loss": 2.4339, + "grad_norm": 2.142657518386841, + "learning_rate": 5e-05, + "epoch": 0.11141704552730998, + "step": 1740 + }, + { + "loss": 2.4583, + "grad_norm": 1.9848843812942505, + "learning_rate": 5e-05, + "epoch": 0.11173720945123904, + "step": 1745 + }, + { + "loss": 2.4431, + "grad_norm": 1.9729015827178955, + "learning_rate": 5e-05, + "epoch": 0.11205737337516809, + "step": 1750 + }, + { + "loss": 2.4312, + "grad_norm": 8.698593139648438, + "learning_rate": 5e-05, + "epoch": 0.11237753729909714, + "step": 1755 + }, + { + "loss": 2.4592, + "grad_norm": 2.070530414581299, + "learning_rate": 5e-05, + "epoch": 
0.11269770122302619, + "step": 1760 + }, + { + "loss": 2.4533, + "grad_norm": 2.034292697906494, + "learning_rate": 5e-05, + "epoch": 0.11301786514695523, + "step": 1765 + }, + { + "loss": 2.4622, + "grad_norm": 2.0932867527008057, + "learning_rate": 5e-05, + "epoch": 0.11333802907088429, + "step": 1770 + }, + { + "loss": 2.4165, + "grad_norm": 1.9923781156539917, + "learning_rate": 5e-05, + "epoch": 0.11365819299481335, + "step": 1775 + }, + { + "loss": 2.4384, + "grad_norm": 2.063328504562378, + "learning_rate": 5e-05, + "epoch": 0.11397835691874239, + "step": 1780 + }, + { + "loss": 2.4038, + "grad_norm": 2.021510124206543, + "learning_rate": 5e-05, + "epoch": 0.11429852084267145, + "step": 1785 + }, + { + "loss": 2.4164, + "grad_norm": 1.953681468963623, + "learning_rate": 5e-05, + "epoch": 0.1146186847666005, + "step": 1790 + }, + { + "loss": 2.4103, + "grad_norm": 1.976102590560913, + "learning_rate": 5e-05, + "epoch": 0.11493884869052955, + "step": 1795 + }, + { + "loss": 2.4182, + "grad_norm": 1.886995792388916, + "learning_rate": 5e-05, + "epoch": 0.1152590126144586, + "step": 1800 + }, + { + "eval_loss": 2.283639430999756, + "eval_runtime": 12.6341, + "eval_samples_per_second": 162.101, + "eval_steps_per_second": 20.263, + "epoch": 0.1152590126144586, + "step": 1800 + }, + { + "loss": 2.4338, + "grad_norm": 2.145838975906372, + "learning_rate": 5e-05, + "epoch": 0.11557917653838766, + "step": 1805 + }, + { + "loss": 2.4621, + "grad_norm": 2.145569324493408, + "learning_rate": 5e-05, + "epoch": 0.1158993404623167, + "step": 1810 + }, + { + "loss": 2.4088, + "grad_norm": 2.0806474685668945, + "learning_rate": 5e-05, + "epoch": 0.11621950438624576, + "step": 1815 + }, + { + "loss": 2.4203, + "grad_norm": 2.0347843170166016, + "learning_rate": 5e-05, + "epoch": 0.11653966831017482, + "step": 1820 + }, + { + "loss": 2.4236, + "grad_norm": 1.9410957098007202, + "learning_rate": 5e-05, + "epoch": 0.11685983223410386, + "step": 1825 + }, + { + "loss": 2.4548, + "grad_norm": 1.9339467287063599, + "learning_rate": 5e-05, + "epoch": 0.11717999615803291, + "step": 1830 + }, + { + "loss": 2.4241, + "grad_norm": 2.0596871376037598, + "learning_rate": 5e-05, + "epoch": 0.11750016008196197, + "step": 1835 + }, + { + "loss": 2.4443, + "grad_norm": 2.01784610748291, + "learning_rate": 5e-05, + "epoch": 0.11782032400589101, + "step": 1840 + }, + { + "loss": 2.4297, + "grad_norm": 1.915007472038269, + "learning_rate": 5e-05, + "epoch": 0.11814048792982007, + "step": 1845 + }, + { + "loss": 2.412, + "grad_norm": 2.025275945663452, + "learning_rate": 5e-05, + "epoch": 0.11846065185374911, + "step": 1850 + }, + { + "loss": 2.4118, + "grad_norm": 1.9844189882278442, + "learning_rate": 5e-05, + "epoch": 0.11878081577767817, + "step": 1855 + }, + { + "loss": 2.4055, + "grad_norm": 2.0724167823791504, + "learning_rate": 5e-05, + "epoch": 0.11910097970160723, + "step": 1860 + }, + { + "loss": 2.4241, + "grad_norm": 2.0442521572113037, + "learning_rate": 5e-05, + "epoch": 0.11942114362553627, + "step": 1865 + }, + { + "loss": 2.4462, + "grad_norm": 1.9685866832733154, + "learning_rate": 5e-05, + "epoch": 0.11974130754946533, + "step": 1870 + }, + { + "loss": 2.4232, + "grad_norm": 1.9572803974151611, + "learning_rate": 5e-05, + "epoch": 0.12006147147339438, + "step": 1875 + }, + { + "loss": 2.4565, + "grad_norm": 2.105123519897461, + "learning_rate": 5e-05, + "epoch": 0.12038163539732342, + "step": 1880 + }, + { + "loss": 2.4146, + "grad_norm": 2.017563581466675, + "learning_rate": 5e-05, + "epoch": 
0.12070179932125248, + "step": 1885 + }, + { + "loss": 2.4295, + "grad_norm": 1.9783453941345215, + "learning_rate": 5e-05, + "epoch": 0.12102196324518154, + "step": 1890 + }, + { + "loss": 2.4148, + "grad_norm": 2.017634868621826, + "learning_rate": 5e-05, + "epoch": 0.12134212716911058, + "step": 1895 + }, + { + "loss": 2.4214, + "grad_norm": 2.16438627243042, + "learning_rate": 5e-05, + "epoch": 0.12166229109303964, + "step": 1900 + }, + { + "loss": 2.42, + "grad_norm": 1.980455994606018, + "learning_rate": 5e-05, + "epoch": 0.1219824550169687, + "step": 1905 + }, + { + "loss": 2.3945, + "grad_norm": 1.9849518537521362, + "learning_rate": 5e-05, + "epoch": 0.12230261894089774, + "step": 1910 + }, + { + "loss": 2.459, + "grad_norm": 1.9081141948699951, + "learning_rate": 5e-05, + "epoch": 0.1226227828648268, + "step": 1915 + }, + { + "loss": 2.4555, + "grad_norm": 1.9519824981689453, + "learning_rate": 5e-05, + "epoch": 0.12294294678875585, + "step": 1920 + }, + { + "loss": 2.4165, + "grad_norm": 1.8904905319213867, + "learning_rate": 5e-05, + "epoch": 0.12326311071268489, + "step": 1925 + }, + { + "loss": 2.3908, + "grad_norm": 2.1023762226104736, + "learning_rate": 5e-05, + "epoch": 0.12358327463661395, + "step": 1930 + }, + { + "loss": 2.4155, + "grad_norm": 1.937259554862976, + "learning_rate": 5e-05, + "epoch": 0.12390343856054299, + "step": 1935 + }, + { + "loss": 2.3992, + "grad_norm": 2.0456533432006836, + "learning_rate": 5e-05, + "epoch": 0.12422360248447205, + "step": 1940 + }, + { + "loss": 2.4287, + "grad_norm": 2.0166923999786377, + "learning_rate": 5e-05, + "epoch": 0.1245437664084011, + "step": 1945 + }, + { + "loss": 2.4264, + "grad_norm": 2.064141273498535, + "learning_rate": 5e-05, + "epoch": 0.12486393033233015, + "step": 1950 + }, + { + "loss": 2.4268, + "grad_norm": 1.9742683172225952, + "learning_rate": 5e-05, + "epoch": 0.1251840942562592, + "step": 1955 + }, + { + "loss": 2.4056, + "grad_norm": 1.9377827644348145, + "learning_rate": 5e-05, + "epoch": 0.12550425818018826, + "step": 1960 + }, + { + "loss": 2.4298, + "grad_norm": 1.92035710811615, + "learning_rate": 5e-05, + "epoch": 0.12582442210411732, + "step": 1965 + }, + { + "loss": 2.4565, + "grad_norm": 1.9152588844299316, + "learning_rate": 5e-05, + "epoch": 0.12614458602804635, + "step": 1970 + }, + { + "loss": 2.4223, + "grad_norm": 1.9562370777130127, + "learning_rate": 5e-05, + "epoch": 0.1264647499519754, + "step": 1975 + }, + { + "loss": 2.3804, + "grad_norm": 1.9528794288635254, + "learning_rate": 5e-05, + "epoch": 0.12678491387590446, + "step": 1980 + }, + { + "loss": 2.4475, + "grad_norm": 1.9754786491394043, + "learning_rate": 5e-05, + "epoch": 0.12710507779983352, + "step": 1985 + }, + { + "loss": 2.4395, + "grad_norm": 1.9041407108306885, + "learning_rate": 5e-05, + "epoch": 0.12742524172376257, + "step": 1990 + }, + { + "loss": 2.4235, + "grad_norm": 2.0152764320373535, + "learning_rate": 5e-05, + "epoch": 0.12774540564769163, + "step": 1995 + }, + { + "loss": 2.4227, + "grad_norm": 1.8605611324310303, + "learning_rate": 5e-05, + "epoch": 0.12806556957162066, + "step": 2000 + }, + { + "eval_loss": 2.2688512802124023, + "eval_runtime": 9.5336, + "eval_samples_per_second": 214.818, + "eval_steps_per_second": 26.852, + "epoch": 0.12806556957162066, + "step": 2000 + }, + { + "loss": 2.4202, + "grad_norm": 2.040646553039551, + "learning_rate": 5e-05, + "epoch": 0.12838573349554971, + "step": 2005 + }, + { + "loss": 2.4003, + "grad_norm": 2.212007522583008, + "learning_rate": 5e-05, + "epoch": 
0.12870589741947877, + "step": 2010 + }, + { + "loss": 2.4121, + "grad_norm": 1.964044451713562, + "learning_rate": 5e-05, + "epoch": 0.12902606134340783, + "step": 2015 + }, + { + "loss": 2.3939, + "grad_norm": 1.9541124105453491, + "learning_rate": 5e-05, + "epoch": 0.12934622526733688, + "step": 2020 + }, + { + "loss": 2.4335, + "grad_norm": 1.8698004484176636, + "learning_rate": 5e-05, + "epoch": 0.12966638919126594, + "step": 2025 + }, + { + "loss": 2.4246, + "grad_norm": 1.956687092781067, + "learning_rate": 5e-05, + "epoch": 0.12998655311519497, + "step": 2030 + }, + { + "loss": 2.4097, + "grad_norm": 2.137901544570923, + "learning_rate": 5e-05, + "epoch": 0.13030671703912403, + "step": 2035 + }, + { + "loss": 2.4066, + "grad_norm": 1.9962667226791382, + "learning_rate": 5e-05, + "epoch": 0.13062688096305308, + "step": 2040 + }, + { + "loss": 2.404, + "grad_norm": 1.9223984479904175, + "learning_rate": 5e-05, + "epoch": 0.13094704488698214, + "step": 2045 + }, + { + "loss": 2.3836, + "grad_norm": 1.956534504890442, + "learning_rate": 5e-05, + "epoch": 0.1312672088109112, + "step": 2050 + }, + { + "loss": 2.4043, + "grad_norm": 1.9311598539352417, + "learning_rate": 5e-05, + "epoch": 0.13158737273484022, + "step": 2055 + }, + { + "loss": 2.4129, + "grad_norm": 2.080878496170044, + "learning_rate": 5e-05, + "epoch": 0.13190753665876928, + "step": 2060 + }, + { + "loss": 2.4115, + "grad_norm": 2.0229105949401855, + "learning_rate": 5e-05, + "epoch": 0.13222770058269834, + "step": 2065 + }, + { + "loss": 2.3804, + "grad_norm": 2.0059521198272705, + "learning_rate": 5e-05, + "epoch": 0.1325478645066274, + "step": 2070 + }, + { + "loss": 2.402, + "grad_norm": 2.0359392166137695, + "learning_rate": 5e-05, + "epoch": 0.13286802843055645, + "step": 2075 + }, + { + "loss": 2.3843, + "grad_norm": 1.9484703540802002, + "learning_rate": 5e-05, + "epoch": 0.1331881923544855, + "step": 2080 + }, + { + "loss": 2.4186, + "grad_norm": 2.008492946624756, + "learning_rate": 5e-05, + "epoch": 0.13350835627841454, + "step": 2085 + }, + { + "loss": 2.4124, + "grad_norm": 2.0623104572296143, + "learning_rate": 5e-05, + "epoch": 0.1338285202023436, + "step": 2090 + }, + { + "loss": 2.3599, + "grad_norm": 1.8425260782241821, + "learning_rate": 5e-05, + "epoch": 0.13414868412627265, + "step": 2095 + }, + { + "loss": 2.3984, + "grad_norm": 2.0146563053131104, + "learning_rate": 5e-05, + "epoch": 0.1344688480502017, + "step": 2100 + }, + { + "loss": 2.3858, + "grad_norm": 2.082679033279419, + "learning_rate": 5e-05, + "epoch": 0.13478901197413076, + "step": 2105 + }, + { + "loss": 2.4171, + "grad_norm": 2.029128074645996, + "learning_rate": 5e-05, + "epoch": 0.13510917589805982, + "step": 2110 + }, + { + "loss": 2.3959, + "grad_norm": 1.987762212753296, + "learning_rate": 5e-05, + "epoch": 0.13542933982198885, + "step": 2115 + }, + { + "loss": 2.3947, + "grad_norm": 2.0950815677642822, + "learning_rate": 5e-05, + "epoch": 0.1357495037459179, + "step": 2120 + }, + { + "loss": 2.4035, + "grad_norm": 2.036588668823242, + "learning_rate": 5e-05, + "epoch": 0.13606966766984696, + "step": 2125 + }, + { + "loss": 2.4071, + "grad_norm": 2.120378017425537, + "learning_rate": 5e-05, + "epoch": 0.13638983159377602, + "step": 2130 + }, + { + "loss": 2.404, + "grad_norm": 1.9909882545471191, + "learning_rate": 5e-05, + "epoch": 0.13670999551770507, + "step": 2135 + }, + { + "loss": 2.4018, + "grad_norm": 2.004340887069702, + "learning_rate": 5e-05, + "epoch": 0.1370301594416341, + "step": 2140 + }, + { + "loss": 2.4058, + 
"grad_norm": 1.8815783262252808, + "learning_rate": 5e-05, + "epoch": 0.13735032336556316, + "step": 2145 + }, + { + "loss": 2.3965, + "grad_norm": 1.9335230588912964, + "learning_rate": 5e-05, + "epoch": 0.13767048728949222, + "step": 2150 + }, + { + "loss": 2.4259, + "grad_norm": 2.0381062030792236, + "learning_rate": 5e-05, + "epoch": 0.13799065121342127, + "step": 2155 + }, + { + "loss": 2.406, + "grad_norm": 1.9996901750564575, + "learning_rate": 5e-05, + "epoch": 0.13831081513735033, + "step": 2160 + }, + { + "loss": 2.4039, + "grad_norm": 1.899214267730713, + "learning_rate": 5e-05, + "epoch": 0.13863097906127939, + "step": 2165 + }, + { + "loss": 2.4298, + "grad_norm": 1.9462037086486816, + "learning_rate": 5e-05, + "epoch": 0.13895114298520841, + "step": 2170 + }, + { + "loss": 2.4035, + "grad_norm": 1.935436487197876, + "learning_rate": 5e-05, + "epoch": 0.13927130690913747, + "step": 2175 + }, + { + "loss": 2.3991, + "grad_norm": 1.9040533304214478, + "learning_rate": 5e-05, + "epoch": 0.13959147083306653, + "step": 2180 + }, + { + "loss": 2.4263, + "grad_norm": 1.9670405387878418, + "learning_rate": 5e-05, + "epoch": 0.13991163475699558, + "step": 2185 + }, + { + "loss": 2.4282, + "grad_norm": 2.0874922275543213, + "learning_rate": 5e-05, + "epoch": 0.14023179868092464, + "step": 2190 + }, + { + "loss": 2.4144, + "grad_norm": 1.953114628791809, + "learning_rate": 5e-05, + "epoch": 0.1405519626048537, + "step": 2195 + }, + { + "loss": 2.3966, + "grad_norm": 2.0168471336364746, + "learning_rate": 5e-05, + "epoch": 0.14087212652878273, + "step": 2200 + }, + { + "eval_loss": 2.253744125366211, + "eval_runtime": 10.1134, + "eval_samples_per_second": 202.504, + "eval_steps_per_second": 25.313, + "epoch": 0.14087212652878273, + "step": 2200 + }, + { + "loss": 2.3911, + "grad_norm": 1.9229140281677246, + "learning_rate": 5e-05, + "epoch": 0.14119229045271178, + "step": 2205 + }, + { + "loss": 2.4168, + "grad_norm": 1.9505460262298584, + "learning_rate": 5e-05, + "epoch": 0.14151245437664084, + "step": 2210 + }, + { + "loss": 2.4122, + "grad_norm": 1.9959040880203247, + "learning_rate": 5e-05, + "epoch": 0.1418326183005699, + "step": 2215 + }, + { + "loss": 2.4155, + "grad_norm": 2.070401191711426, + "learning_rate": 5e-05, + "epoch": 0.14215278222449895, + "step": 2220 + }, + { + "loss": 2.3864, + "grad_norm": 1.980580449104309, + "learning_rate": 5e-05, + "epoch": 0.14247294614842798, + "step": 2225 + }, + { + "loss": 2.4002, + "grad_norm": 1.9218393564224243, + "learning_rate": 5e-05, + "epoch": 0.14279311007235704, + "step": 2230 + }, + { + "loss": 2.3819, + "grad_norm": 2.057966709136963, + "learning_rate": 5e-05, + "epoch": 0.1431132739962861, + "step": 2235 + }, + { + "loss": 2.4171, + "grad_norm": 2.0004384517669678, + "learning_rate": 5e-05, + "epoch": 0.14343343792021515, + "step": 2240 + }, + { + "loss": 2.3866, + "grad_norm": 2.0221750736236572, + "learning_rate": 5e-05, + "epoch": 0.1437536018441442, + "step": 2245 + }, + { + "loss": 2.4216, + "grad_norm": 2.0313234329223633, + "learning_rate": 5e-05, + "epoch": 0.14407376576807326, + "step": 2250 + }, + { + "loss": 2.3814, + "grad_norm": 2.1582136154174805, + "learning_rate": 5e-05, + "epoch": 0.1443939296920023, + "step": 2255 + }, + { + "loss": 2.4123, + "grad_norm": 2.0171945095062256, + "learning_rate": 5e-05, + "epoch": 0.14471409361593135, + "step": 2260 + }, + { + "loss": 2.3892, + "grad_norm": 1.9124054908752441, + "learning_rate": 5e-05, + "epoch": 0.1450342575398604, + "step": 2265 + }, + { + "loss": 2.4326, + 
"grad_norm": 1.9355947971343994, + "learning_rate": 5e-05, + "epoch": 0.14535442146378946, + "step": 2270 + }, + { + "loss": 2.4224, + "grad_norm": 2.030381679534912, + "learning_rate": 5e-05, + "epoch": 0.14567458538771852, + "step": 2275 + }, + { + "loss": 2.4016, + "grad_norm": 2.0123848915100098, + "learning_rate": 5e-05, + "epoch": 0.14599474931164758, + "step": 2280 + }, + { + "loss": 2.3838, + "grad_norm": 2.062603712081909, + "learning_rate": 5e-05, + "epoch": 0.1463149132355766, + "step": 2285 + }, + { + "loss": 2.3896, + "grad_norm": 2.0680532455444336, + "learning_rate": 5e-05, + "epoch": 0.14663507715950566, + "step": 2290 + }, + { + "loss": 2.4275, + "grad_norm": 1.846703290939331, + "learning_rate": 5e-05, + "epoch": 0.14695524108343472, + "step": 2295 + }, + { + "loss": 2.3873, + "grad_norm": 1.9122810363769531, + "learning_rate": 5e-05, + "epoch": 0.14727540500736377, + "step": 2300 + }, + { + "loss": 2.4004, + "grad_norm": 1.9584786891937256, + "learning_rate": 5e-05, + "epoch": 0.14759556893129283, + "step": 2305 + }, + { + "loss": 2.3838, + "grad_norm": 1.8874859809875488, + "learning_rate": 5e-05, + "epoch": 0.14791573285522186, + "step": 2310 + }, + { + "loss": 2.3581, + "grad_norm": 1.8960413932800293, + "learning_rate": 5e-05, + "epoch": 0.14823589677915092, + "step": 2315 + }, + { + "loss": 2.3889, + "grad_norm": 1.8831268548965454, + "learning_rate": 5e-05, + "epoch": 0.14855606070307997, + "step": 2320 + }, + { + "loss": 2.3608, + "grad_norm": 1.910288691520691, + "learning_rate": 5e-05, + "epoch": 0.14887622462700903, + "step": 2325 + }, + { + "loss": 2.4205, + "grad_norm": 1.9343372583389282, + "learning_rate": 5e-05, + "epoch": 0.14919638855093809, + "step": 2330 + }, + { + "loss": 2.3924, + "grad_norm": 1.953525424003601, + "learning_rate": 5e-05, + "epoch": 0.14951655247486714, + "step": 2335 + }, + { + "loss": 2.3593, + "grad_norm": 1.9582774639129639, + "learning_rate": 5e-05, + "epoch": 0.14983671639879617, + "step": 2340 + }, + { + "loss": 2.3897, + "grad_norm": 1.9290440082550049, + "learning_rate": 5e-05, + "epoch": 0.15015688032272523, + "step": 2345 + }, + { + "loss": 2.389, + "grad_norm": 1.846218228340149, + "learning_rate": 5e-05, + "epoch": 0.15047704424665428, + "step": 2350 + }, + { + "loss": 2.4196, + "grad_norm": 1.9014278650283813, + "learning_rate": 5e-05, + "epoch": 0.15079720817058334, + "step": 2355 + }, + { + "loss": 2.3938, + "grad_norm": 1.9431865215301514, + "learning_rate": 5e-05, + "epoch": 0.1511173720945124, + "step": 2360 + }, + { + "loss": 2.3931, + "grad_norm": 1.8423478603363037, + "learning_rate": 5e-05, + "epoch": 0.15143753601844145, + "step": 2365 + }, + { + "loss": 2.3864, + "grad_norm": 1.8788933753967285, + "learning_rate": 5e-05, + "epoch": 0.15175769994237048, + "step": 2370 + }, + { + "loss": 2.3934, + "grad_norm": 1.9234330654144287, + "learning_rate": 5e-05, + "epoch": 0.15207786386629954, + "step": 2375 + }, + { + "loss": 2.3706, + "grad_norm": 1.8926013708114624, + "learning_rate": 5e-05, + "epoch": 0.1523980277902286, + "step": 2380 + }, + { + "loss": 2.3912, + "grad_norm": 2.009702682495117, + "learning_rate": 5e-05, + "epoch": 0.15271819171415765, + "step": 2385 + }, + { + "loss": 2.4004, + "grad_norm": 2.00524640083313, + "learning_rate": 5e-05, + "epoch": 0.1530383556380867, + "step": 2390 + }, + { + "loss": 2.3845, + "grad_norm": 1.8410059213638306, + "learning_rate": 5e-05, + "epoch": 0.15335851956201574, + "step": 2395 + }, + { + "loss": 2.3933, + "grad_norm": 1.9169642925262451, + "learning_rate": 5e-05, 
+ "epoch": 0.1536786834859448, + "step": 2400 + }, + { + "eval_loss": 2.2529916763305664, + "eval_runtime": 9.2707, + "eval_samples_per_second": 220.912, + "eval_steps_per_second": 27.614, + "epoch": 0.1536786834859448, + "step": 2400 + }, + { + "loss": 2.3935, + "grad_norm": 1.948587417602539, + "learning_rate": 5e-05, + "epoch": 0.15399884740987385, + "step": 2405 + }, + { + "loss": 2.4017, + "grad_norm": 2.016439914703369, + "learning_rate": 5e-05, + "epoch": 0.1543190113338029, + "step": 2410 + }, + { + "loss": 2.3605, + "grad_norm": 2.007875680923462, + "learning_rate": 5e-05, + "epoch": 0.15463917525773196, + "step": 2415 + }, + { + "loss": 2.3663, + "grad_norm": 1.9632021188735962, + "learning_rate": 5e-05, + "epoch": 0.15495933918166102, + "step": 2420 + }, + { + "loss": 2.3814, + "grad_norm": 1.9989489316940308, + "learning_rate": 5e-05, + "epoch": 0.15527950310559005, + "step": 2425 + }, + { + "loss": 2.3695, + "grad_norm": 2.0570878982543945, + "learning_rate": 5e-05, + "epoch": 0.1555996670295191, + "step": 2430 + }, + { + "loss": 2.3995, + "grad_norm": 2.0144171714782715, + "learning_rate": 5e-05, + "epoch": 0.15591983095344816, + "step": 2435 + }, + { + "loss": 2.4044, + "grad_norm": 1.8741555213928223, + "learning_rate": 5e-05, + "epoch": 0.15623999487737722, + "step": 2440 + }, + { + "loss": 2.3671, + "grad_norm": 1.8373762369155884, + "learning_rate": 5e-05, + "epoch": 0.15656015880130628, + "step": 2445 + }, + { + "loss": 2.3604, + "grad_norm": 1.9890358448028564, + "learning_rate": 5e-05, + "epoch": 0.15688032272523533, + "step": 2450 + }, + { + "loss": 2.3899, + "grad_norm": 2.008896827697754, + "learning_rate": 5e-05, + "epoch": 0.15720048664916436, + "step": 2455 + }, + { + "loss": 2.3925, + "grad_norm": 1.98505699634552, + "learning_rate": 5e-05, + "epoch": 0.15752065057309342, + "step": 2460 + }, + { + "loss": 2.3917, + "grad_norm": 1.958855152130127, + "learning_rate": 5e-05, + "epoch": 0.15784081449702247, + "step": 2465 + }, + { + "loss": 2.3918, + "grad_norm": 1.9323810338974, + "learning_rate": 5e-05, + "epoch": 0.15816097842095153, + "step": 2470 + }, + { + "loss": 2.398, + "grad_norm": 2.0385091304779053, + "learning_rate": 5e-05, + "epoch": 0.1584811423448806, + "step": 2475 + }, + { + "loss": 2.3735, + "grad_norm": 1.877455711364746, + "learning_rate": 5e-05, + "epoch": 0.15880130626880964, + "step": 2480 + }, + { + "loss": 2.4105, + "grad_norm": 1.8172228336334229, + "learning_rate": 5e-05, + "epoch": 0.15912147019273867, + "step": 2485 + }, + { + "loss": 2.3432, + "grad_norm": 1.9281107187271118, + "learning_rate": 5e-05, + "epoch": 0.15944163411666773, + "step": 2490 + }, + { + "loss": 2.4057, + "grad_norm": 2.0879902839660645, + "learning_rate": 5e-05, + "epoch": 0.1597617980405968, + "step": 2495 + }, + { + "loss": 2.3725, + "grad_norm": 1.9170490503311157, + "learning_rate": 5e-05, + "epoch": 0.16008196196452584, + "step": 2500 + }, + { + "loss": 2.3963, + "grad_norm": 1.973979115486145, + "learning_rate": 5e-05, + "epoch": 0.1604021258884549, + "step": 2505 + }, + { + "loss": 2.3596, + "grad_norm": 1.9528260231018066, + "learning_rate": 5e-05, + "epoch": 0.16072228981238393, + "step": 2510 + }, + { + "loss": 2.3587, + "grad_norm": 1.9395289421081543, + "learning_rate": 5e-05, + "epoch": 0.16104245373631298, + "step": 2515 + }, + { + "loss": 2.3813, + "grad_norm": 1.9283883571624756, + "learning_rate": 5e-05, + "epoch": 0.16136261766024204, + "step": 2520 + }, + { + "loss": 2.3981, + "grad_norm": 1.9027310609817505, + "learning_rate": 5e-05, + 
"epoch": 0.1616827815841711, + "step": 2525 + }, + { + "loss": 2.3892, + "grad_norm": 1.9189422130584717, + "learning_rate": 5e-05, + "epoch": 0.16200294550810015, + "step": 2530 + }, + { + "loss": 2.3914, + "grad_norm": 1.9815748929977417, + "learning_rate": 5e-05, + "epoch": 0.1623231094320292, + "step": 2535 + }, + { + "loss": 2.3935, + "grad_norm": 1.9846832752227783, + "learning_rate": 5e-05, + "epoch": 0.16264327335595824, + "step": 2540 + }, + { + "loss": 2.3638, + "grad_norm": 1.8738396167755127, + "learning_rate": 5e-05, + "epoch": 0.1629634372798873, + "step": 2545 + }, + { + "loss": 2.402, + "grad_norm": 1.9183320999145508, + "learning_rate": 5e-05, + "epoch": 0.16328360120381635, + "step": 2550 + }, + { + "loss": 2.3585, + "grad_norm": 1.9421886205673218, + "learning_rate": 5e-05, + "epoch": 0.1636037651277454, + "step": 2555 + }, + { + "loss": 2.372, + "grad_norm": 1.8870490789413452, + "learning_rate": 5e-05, + "epoch": 0.16392392905167447, + "step": 2560 + }, + { + "loss": 2.3954, + "grad_norm": 1.9618778228759766, + "learning_rate": 5e-05, + "epoch": 0.16424409297560352, + "step": 2565 + }, + { + "loss": 2.3422, + "grad_norm": 2.0231478214263916, + "learning_rate": 5e-05, + "epoch": 0.16456425689953255, + "step": 2570 + }, + { + "loss": 2.375, + "grad_norm": 2.0343830585479736, + "learning_rate": 5e-05, + "epoch": 0.1648844208234616, + "step": 2575 + }, + { + "loss": 2.3795, + "grad_norm": 1.8410435914993286, + "learning_rate": 5e-05, + "epoch": 0.16520458474739066, + "step": 2580 + }, + { + "loss": 2.3509, + "grad_norm": 1.8402718305587769, + "learning_rate": 5e-05, + "epoch": 0.16552474867131972, + "step": 2585 + }, + { + "loss": 2.3851, + "grad_norm": 1.9031217098236084, + "learning_rate": 5e-05, + "epoch": 0.16584491259524878, + "step": 2590 + }, + { + "loss": 2.3865, + "grad_norm": 1.8732967376708984, + "learning_rate": 5e-05, + "epoch": 0.1661650765191778, + "step": 2595 + }, + { + "loss": 2.3787, + "grad_norm": 1.8325210809707642, + "learning_rate": 5e-05, + "epoch": 0.16648524044310686, + "step": 2600 + }, + { + "eval_loss": 2.2488222122192383, + "eval_runtime": 9.2735, + "eval_samples_per_second": 220.844, + "eval_steps_per_second": 27.606, + "epoch": 0.16648524044310686, + "step": 2600 + }, + { + "loss": 2.3569, + "grad_norm": 1.9319522380828857, + "learning_rate": 5e-05, + "epoch": 0.16680540436703592, + "step": 2605 + }, + { + "loss": 2.3725, + "grad_norm": 1.997213363647461, + "learning_rate": 5e-05, + "epoch": 0.16712556829096498, + "step": 2610 + }, + { + "loss": 2.4022, + "grad_norm": 2.0918712615966797, + "learning_rate": 5e-05, + "epoch": 0.16744573221489403, + "step": 2615 + }, + { + "loss": 2.369, + "grad_norm": 2.015212297439575, + "learning_rate": 5e-05, + "epoch": 0.1677658961388231, + "step": 2620 + }, + { + "loss": 2.3813, + "grad_norm": 1.9604344367980957, + "learning_rate": 5e-05, + "epoch": 0.16808606006275212, + "step": 2625 + }, + { + "loss": 2.3668, + "grad_norm": 1.8466747999191284, + "learning_rate": 5e-05, + "epoch": 0.16840622398668117, + "step": 2630 + }, + { + "loss": 2.3645, + "grad_norm": 1.8700547218322754, + "learning_rate": 5e-05, + "epoch": 0.16872638791061023, + "step": 2635 + }, + { + "loss": 2.3847, + "grad_norm": 1.9464409351348877, + "learning_rate": 5e-05, + "epoch": 0.1690465518345393, + "step": 2640 + }, + { + "loss": 2.3664, + "grad_norm": 1.9160685539245605, + "learning_rate": 5e-05, + "epoch": 0.16936671575846834, + "step": 2645 + }, + { + "loss": 2.3949, + "grad_norm": 1.993518590927124, + "learning_rate": 5e-05, + 
"epoch": 0.1696868796823974, + "step": 2650 + }, + { + "loss": 2.3986, + "grad_norm": 1.8965723514556885, + "learning_rate": 5e-05, + "epoch": 0.17000704360632643, + "step": 2655 + }, + { + "loss": 2.3931, + "grad_norm": 1.8633148670196533, + "learning_rate": 5e-05, + "epoch": 0.1703272075302555, + "step": 2660 + }, + { + "loss": 2.3934, + "grad_norm": 1.8948819637298584, + "learning_rate": 5e-05, + "epoch": 0.17064737145418454, + "step": 2665 + }, + { + "loss": 2.3787, + "grad_norm": 1.9217243194580078, + "learning_rate": 5e-05, + "epoch": 0.1709675353781136, + "step": 2670 + }, + { + "loss": 2.3821, + "grad_norm": 1.774686574935913, + "learning_rate": 5e-05, + "epoch": 0.17128769930204266, + "step": 2675 + }, + { + "loss": 2.364, + "grad_norm": 1.9000036716461182, + "learning_rate": 5e-05, + "epoch": 0.17160786322597169, + "step": 2680 + }, + { + "loss": 2.3718, + "grad_norm": 1.8330143690109253, + "learning_rate": 5e-05, + "epoch": 0.17192802714990074, + "step": 2685 + }, + { + "loss": 2.4015, + "grad_norm": 1.9639065265655518, + "learning_rate": 5e-05, + "epoch": 0.1722481910738298, + "step": 2690 + }, + { + "loss": 2.403, + "grad_norm": 1.8496508598327637, + "learning_rate": 5e-05, + "epoch": 0.17256835499775885, + "step": 2695 + }, + { + "loss": 2.3548, + "grad_norm": 1.8958038091659546, + "learning_rate": 5e-05, + "epoch": 0.1728885189216879, + "step": 2700 + }, + { + "loss": 2.3648, + "grad_norm": 2.150702476501465, + "learning_rate": 5e-05, + "epoch": 0.17320868284561697, + "step": 2705 + }, + { + "loss": 2.3544, + "grad_norm": 2.0381345748901367, + "learning_rate": 5e-05, + "epoch": 0.173528846769546, + "step": 2710 + }, + { + "loss": 2.3726, + "grad_norm": 1.9142519235610962, + "learning_rate": 5e-05, + "epoch": 0.17384901069347505, + "step": 2715 + }, + { + "loss": 2.3721, + "grad_norm": 1.9022127389907837, + "learning_rate": 5e-05, + "epoch": 0.1741691746174041, + "step": 2720 + }, + { + "loss": 2.3595, + "grad_norm": 2.145447254180908, + "learning_rate": 5e-05, + "epoch": 0.17448933854133317, + "step": 2725 + }, + { + "loss": 2.3423, + "grad_norm": 1.8730753660202026, + "learning_rate": 5e-05, + "epoch": 0.17480950246526222, + "step": 2730 + }, + { + "loss": 2.3756, + "grad_norm": 1.9949947595596313, + "learning_rate": 5e-05, + "epoch": 0.17512966638919128, + "step": 2735 + }, + { + "loss": 2.3446, + "grad_norm": 1.9902111291885376, + "learning_rate": 5e-05, + "epoch": 0.1754498303131203, + "step": 2740 + }, + { + "loss": 2.3616, + "grad_norm": 1.9357950687408447, + "learning_rate": 5e-05, + "epoch": 0.17576999423704937, + "step": 2745 + }, + { + "loss": 2.3666, + "grad_norm": 1.9442518949508667, + "learning_rate": 5e-05, + "epoch": 0.17609015816097842, + "step": 2750 + }, + { + "loss": 2.3623, + "grad_norm": 1.8661247491836548, + "learning_rate": 5e-05, + "epoch": 0.17641032208490748, + "step": 2755 + }, + { + "loss": 2.368, + "grad_norm": 1.8535679578781128, + "learning_rate": 5e-05, + "epoch": 0.17673048600883653, + "step": 2760 + }, + { + "loss": 2.3872, + "grad_norm": 1.8927170038223267, + "learning_rate": 5e-05, + "epoch": 0.17705064993276556, + "step": 2765 + }, + { + "loss": 2.3464, + "grad_norm": 1.9512662887573242, + "learning_rate": 5e-05, + "epoch": 0.17737081385669462, + "step": 2770 + }, + { + "loss": 2.3607, + "grad_norm": 1.8841359615325928, + "learning_rate": 5e-05, + "epoch": 0.17769097778062368, + "step": 2775 + }, + { + "loss": 2.3779, + "grad_norm": 1.9043680429458618, + "learning_rate": 5e-05, + "epoch": 0.17801114170455273, + "step": 2780 + }, + { + 
"loss": 2.3836, + "grad_norm": 1.8490782976150513, + "learning_rate": 5e-05, + "epoch": 0.1783313056284818, + "step": 2785 + }, + { + "loss": 2.3739, + "grad_norm": 1.9332350492477417, + "learning_rate": 5e-05, + "epoch": 0.17865146955241085, + "step": 2790 + }, + { + "loss": 2.3407, + "grad_norm": 1.8520585298538208, + "learning_rate": 5e-05, + "epoch": 0.17897163347633988, + "step": 2795 + }, + { + "loss": 2.3733, + "grad_norm": 1.8878172636032104, + "learning_rate": 5e-05, + "epoch": 0.17929179740026893, + "step": 2800 + }, + { + "eval_loss": 2.2236533164978027, + "eval_runtime": 9.4962, + "eval_samples_per_second": 215.665, + "eval_steps_per_second": 26.958, + "epoch": 0.17929179740026893, + "step": 2800 + }, + { + "loss": 2.3377, + "grad_norm": 1.994707465171814, + "learning_rate": 5e-05, + "epoch": 0.179611961324198, + "step": 2805 + }, + { + "loss": 2.3754, + "grad_norm": 1.7752844095230103, + "learning_rate": 5e-05, + "epoch": 0.17993212524812705, + "step": 2810 + }, + { + "loss": 2.376, + "grad_norm": 1.9438122510910034, + "learning_rate": 5e-05, + "epoch": 0.1802522891720561, + "step": 2815 + }, + { + "loss": 2.3698, + "grad_norm": 1.9461045265197754, + "learning_rate": 5e-05, + "epoch": 0.18057245309598516, + "step": 2820 + }, + { + "loss": 2.3587, + "grad_norm": 1.92300283908844, + "learning_rate": 5e-05, + "epoch": 0.1808926170199142, + "step": 2825 + }, + { + "loss": 2.395, + "grad_norm": 2.004666328430176, + "learning_rate": 5e-05, + "epoch": 0.18121278094384324, + "step": 2830 + }, + { + "loss": 2.3882, + "grad_norm": 1.8876590728759766, + "learning_rate": 5e-05, + "epoch": 0.1815329448677723, + "step": 2835 + }, + { + "loss": 2.337, + "grad_norm": 1.9001890420913696, + "learning_rate": 5e-05, + "epoch": 0.18185310879170136, + "step": 2840 + }, + { + "loss": 2.3796, + "grad_norm": 1.876528024673462, + "learning_rate": 5e-05, + "epoch": 0.1821732727156304, + "step": 2845 + }, + { + "loss": 2.366, + "grad_norm": 1.977066159248352, + "learning_rate": 5e-05, + "epoch": 0.18249343663955944, + "step": 2850 + }, + { + "loss": 2.3681, + "grad_norm": 1.8940303325653076, + "learning_rate": 5e-05, + "epoch": 0.1828136005634885, + "step": 2855 + }, + { + "loss": 2.3721, + "grad_norm": 1.9290567636489868, + "learning_rate": 5e-05, + "epoch": 0.18313376448741756, + "step": 2860 + }, + { + "loss": 2.3555, + "grad_norm": 1.8955270051956177, + "learning_rate": 5e-05, + "epoch": 0.1834539284113466, + "step": 2865 + }, + { + "loss": 2.3675, + "grad_norm": 1.916295051574707, + "learning_rate": 5e-05, + "epoch": 0.18377409233527567, + "step": 2870 + }, + { + "loss": 2.3718, + "grad_norm": 2.090623617172241, + "learning_rate": 5e-05, + "epoch": 0.18409425625920472, + "step": 2875 + }, + { + "loss": 2.3617, + "grad_norm": 1.9669326543807983, + "learning_rate": 5e-05, + "epoch": 0.18441442018313375, + "step": 2880 + }, + { + "loss": 2.3693, + "grad_norm": 1.856131911277771, + "learning_rate": 5e-05, + "epoch": 0.1847345841070628, + "step": 2885 + }, + { + "loss": 2.3714, + "grad_norm": 1.9637651443481445, + "learning_rate": 5e-05, + "epoch": 0.18505474803099187, + "step": 2890 + }, + { + "loss": 2.3669, + "grad_norm": 2.030195951461792, + "learning_rate": 5e-05, + "epoch": 0.18537491195492092, + "step": 2895 + }, + { + "loss": 2.364, + "grad_norm": 1.9500765800476074, + "learning_rate": 5e-05, + "epoch": 0.18569507587884998, + "step": 2900 + }, + { + "loss": 2.3647, + "grad_norm": 1.9658929109573364, + "learning_rate": 5e-05, + "epoch": 0.18601523980277904, + "step": 2905 + }, + { + "loss": 
2.3541, + "grad_norm": 1.9205372333526611, + "learning_rate": 5e-05, + "epoch": 0.18633540372670807, + "step": 2910 + }, + { + "loss": 2.3631, + "grad_norm": 1.9161553382873535, + "learning_rate": 5e-05, + "epoch": 0.18665556765063712, + "step": 2915 + }, + { + "loss": 2.3526, + "grad_norm": 1.9861873388290405, + "learning_rate": 5e-05, + "epoch": 0.18697573157456618, + "step": 2920 + }, + { + "loss": 2.3579, + "grad_norm": 1.9132648706436157, + "learning_rate": 5e-05, + "epoch": 0.18729589549849524, + "step": 2925 + }, + { + "loss": 2.358, + "grad_norm": 1.9785979986190796, + "learning_rate": 5e-05, + "epoch": 0.1876160594224243, + "step": 2930 + }, + { + "loss": 2.3736, + "grad_norm": 1.9504092931747437, + "learning_rate": 5e-05, + "epoch": 0.18793622334635332, + "step": 2935 + }, + { + "loss": 2.3665, + "grad_norm": 1.8407647609710693, + "learning_rate": 5e-05, + "epoch": 0.18825638727028238, + "step": 2940 + }, + { + "loss": 2.3351, + "grad_norm": 2.004072666168213, + "learning_rate": 5e-05, + "epoch": 0.18857655119421143, + "step": 2945 + }, + { + "loss": 2.3649, + "grad_norm": 1.8799525499343872, + "learning_rate": 5e-05, + "epoch": 0.1888967151181405, + "step": 2950 + }, + { + "loss": 2.351, + "grad_norm": 2.0074257850646973, + "learning_rate": 5e-05, + "epoch": 0.18921687904206955, + "step": 2955 + }, + { + "loss": 2.3236, + "grad_norm": 1.8301315307617188, + "learning_rate": 5e-05, + "epoch": 0.1895370429659986, + "step": 2960 + }, + { + "loss": 2.3545, + "grad_norm": 1.9088454246520996, + "learning_rate": 5e-05, + "epoch": 0.18985720688992763, + "step": 2965 + }, + { + "loss": 2.3666, + "grad_norm": 1.9445098638534546, + "learning_rate": 5e-05, + "epoch": 0.1901773708138567, + "step": 2970 + }, + { + "loss": 2.3593, + "grad_norm": 1.8038558959960938, + "learning_rate": 5e-05, + "epoch": 0.19049753473778575, + "step": 2975 + }, + { + "loss": 2.3387, + "grad_norm": 1.8952257633209229, + "learning_rate": 5e-05, + "epoch": 0.1908176986617148, + "step": 2980 + }, + { + "loss": 2.3742, + "grad_norm": 1.8767812252044678, + "learning_rate": 5e-05, + "epoch": 0.19113786258564386, + "step": 2985 + }, + { + "loss": 2.3786, + "grad_norm": 1.771638035774231, + "learning_rate": 5e-05, + "epoch": 0.19145802650957292, + "step": 2990 + }, + { + "loss": 2.3528, + "grad_norm": 1.9153273105621338, + "learning_rate": 5e-05, + "epoch": 0.19177819043350194, + "step": 2995 + }, + { + "loss": 2.3781, + "grad_norm": 1.9172663688659668, + "learning_rate": 5e-05, + "epoch": 0.192098354357431, + "step": 3000 + }, + { + "eval_loss": 2.209613800048828, + "eval_runtime": 12.6666, + "eval_samples_per_second": 161.685, + "eval_steps_per_second": 20.211, + "epoch": 0.192098354357431, + "step": 3000 + }, + { + "loss": 2.3762, + "grad_norm": 1.8722891807556152, + "learning_rate": 5e-05, + "epoch": 0.19241851828136006, + "step": 3005 + }, + { + "loss": 2.3861, + "grad_norm": 1.907089352607727, + "learning_rate": 5e-05, + "epoch": 0.1927386822052891, + "step": 3010 + }, + { + "loss": 2.3477, + "grad_norm": 1.982266902923584, + "learning_rate": 5e-05, + "epoch": 0.19305884612921817, + "step": 3015 + }, + { + "loss": 2.3555, + "grad_norm": 1.9882186651229858, + "learning_rate": 5e-05, + "epoch": 0.1933790100531472, + "step": 3020 + }, + { + "loss": 2.327, + "grad_norm": 1.8406589031219482, + "learning_rate": 5e-05, + "epoch": 0.19369917397707626, + "step": 3025 + }, + { + "loss": 2.3411, + "grad_norm": 1.9217675924301147, + "learning_rate": 5e-05, + "epoch": 0.1940193379010053, + "step": 3030 + }, + { + "loss": 2.3554, 
+ "grad_norm": 1.9655086994171143, + "learning_rate": 5e-05, + "epoch": 0.19433950182493437, + "step": 3035 + }, + { + "loss": 2.373, + "grad_norm": 2.0582704544067383, + "learning_rate": 5e-05, + "epoch": 0.19465966574886343, + "step": 3040 + }, + { + "loss": 2.3424, + "grad_norm": 1.8587092161178589, + "learning_rate": 5e-05, + "epoch": 0.19497982967279248, + "step": 3045 + }, + { + "loss": 2.3319, + "grad_norm": 1.8777543306350708, + "learning_rate": 5e-05, + "epoch": 0.1952999935967215, + "step": 3050 + }, + { + "loss": 2.3365, + "grad_norm": 1.9621491432189941, + "learning_rate": 5e-05, + "epoch": 0.19562015752065057, + "step": 3055 + }, + { + "loss": 2.3459, + "grad_norm": 1.8719767332077026, + "learning_rate": 5e-05, + "epoch": 0.19594032144457962, + "step": 3060 + }, + { + "loss": 2.3487, + "grad_norm": 1.970376968383789, + "learning_rate": 5e-05, + "epoch": 0.19626048536850868, + "step": 3065 + }, + { + "loss": 2.3692, + "grad_norm": 1.8878765106201172, + "learning_rate": 5e-05, + "epoch": 0.19658064929243774, + "step": 3070 + }, + { + "loss": 2.3335, + "grad_norm": 1.9047449827194214, + "learning_rate": 5e-05, + "epoch": 0.1969008132163668, + "step": 3075 + }, + { + "loss": 2.3679, + "grad_norm": 1.8994492292404175, + "learning_rate": 5e-05, + "epoch": 0.19722097714029582, + "step": 3080 + }, + { + "loss": 2.3786, + "grad_norm": 1.8764537572860718, + "learning_rate": 5e-05, + "epoch": 0.19754114106422488, + "step": 3085 + }, + { + "loss": 2.3656, + "grad_norm": 1.937950849533081, + "learning_rate": 5e-05, + "epoch": 0.19786130498815394, + "step": 3090 + }, + { + "loss": 2.3501, + "grad_norm": 1.9067658185958862, + "learning_rate": 5e-05, + "epoch": 0.198181468912083, + "step": 3095 + }, + { + "loss": 2.3382, + "grad_norm": 1.9059247970581055, + "learning_rate": 5e-05, + "epoch": 0.19850163283601205, + "step": 3100 + }, + { + "loss": 2.3374, + "grad_norm": 1.9067003726959229, + "learning_rate": 5e-05, + "epoch": 0.19882179675994108, + "step": 3105 + }, + { + "loss": 2.3787, + "grad_norm": 1.860835075378418, + "learning_rate": 5e-05, + "epoch": 0.19914196068387013, + "step": 3110 + }, + { + "loss": 2.3622, + "grad_norm": 1.9090162515640259, + "learning_rate": 5e-05, + "epoch": 0.1994621246077992, + "step": 3115 + }, + { + "loss": 2.3465, + "grad_norm": 2.014218330383301, + "learning_rate": 5e-05, + "epoch": 0.19978228853172825, + "step": 3120 + }, + { + "loss": 2.3767, + "grad_norm": 1.9965459108352661, + "learning_rate": 5e-05, + "epoch": 0.2001024524556573, + "step": 3125 + }, + { + "loss": 2.3674, + "grad_norm": 1.9212764501571655, + "learning_rate": 5e-05, + "epoch": 0.20042261637958636, + "step": 3130 + }, + { + "loss": 2.3373, + "grad_norm": 1.9215936660766602, + "learning_rate": 5e-05, + "epoch": 0.2007427803035154, + "step": 3135 + }, + { + "loss": 2.3155, + "grad_norm": 1.9446462392807007, + "learning_rate": 5e-05, + "epoch": 0.20106294422744445, + "step": 3140 + }, + { + "loss": 2.3644, + "grad_norm": 1.8948733806610107, + "learning_rate": 5e-05, + "epoch": 0.2013831081513735, + "step": 3145 + }, + { + "loss": 2.3357, + "grad_norm": 1.919724702835083, + "learning_rate": 5e-05, + "epoch": 0.20170327207530256, + "step": 3150 + }, + { + "loss": 2.3539, + "grad_norm": 1.8508602380752563, + "learning_rate": 5e-05, + "epoch": 0.20202343599923162, + "step": 3155 + }, + { + "loss": 2.3601, + "grad_norm": 2.0129966735839844, + "learning_rate": 5e-05, + "epoch": 0.20234359992316067, + "step": 3160 + }, + { + "loss": 2.3601, + "grad_norm": 1.693015456199646, + "learning_rate": 
5e-05, + "epoch": 0.2026637638470897, + "step": 3165 + }, + { + "loss": 2.3558, + "grad_norm": 1.956392526626587, + "learning_rate": 5e-05, + "epoch": 0.20298392777101876, + "step": 3170 + }, + { + "loss": 2.338, + "grad_norm": 1.818519949913025, + "learning_rate": 5e-05, + "epoch": 0.2033040916949478, + "step": 3175 + }, + { + "loss": 2.3717, + "grad_norm": 1.8295650482177734, + "learning_rate": 5e-05, + "epoch": 0.20362425561887687, + "step": 3180 + }, + { + "loss": 2.3375, + "grad_norm": 2.044961929321289, + "learning_rate": 5e-05, + "epoch": 0.20394441954280593, + "step": 3185 + }, + { + "loss": 2.3358, + "grad_norm": 2.061805248260498, + "learning_rate": 5e-05, + "epoch": 0.20426458346673496, + "step": 3190 + }, + { + "loss": 2.3462, + "grad_norm": 1.870125651359558, + "learning_rate": 5e-05, + "epoch": 0.204584747390664, + "step": 3195 + }, + { + "loss": 2.3482, + "grad_norm": 1.9724948406219482, + "learning_rate": 5e-05, + "epoch": 0.20490491131459307, + "step": 3200 + }, + { + "eval_loss": 2.2023611068725586, + "eval_runtime": 12.9284, + "eval_samples_per_second": 158.411, + "eval_steps_per_second": 19.801, + "epoch": 0.20490491131459307, + "step": 3200 + }, + { + "loss": 2.3575, + "grad_norm": 1.840522289276123, + "learning_rate": 5e-05, + "epoch": 0.20522507523852213, + "step": 3205 + }, + { + "loss": 2.35, + "grad_norm": 1.815750241279602, + "learning_rate": 5e-05, + "epoch": 0.20554523916245118, + "step": 3210 + }, + { + "loss": 2.3852, + "grad_norm": 1.8655439615249634, + "learning_rate": 5e-05, + "epoch": 0.20586540308638024, + "step": 3215 + }, + { + "loss": 2.3345, + "grad_norm": 1.8097730875015259, + "learning_rate": 5e-05, + "epoch": 0.20618556701030927, + "step": 3220 + }, + { + "loss": 2.3516, + "grad_norm": 1.7540837526321411, + "learning_rate": 5e-05, + "epoch": 0.20650573093423832, + "step": 3225 + }, + { + "loss": 2.3371, + "grad_norm": 1.8675291538238525, + "learning_rate": 5e-05, + "epoch": 0.20682589485816738, + "step": 3230 + }, + { + "loss": 2.352, + "grad_norm": 2.067999839782715, + "learning_rate": 5e-05, + "epoch": 0.20714605878209644, + "step": 3235 + }, + { + "loss": 2.3439, + "grad_norm": 1.8403364419937134, + "learning_rate": 5e-05, + "epoch": 0.2074662227060255, + "step": 3240 + }, + { + "loss": 2.3299, + "grad_norm": 1.8896580934524536, + "learning_rate": 5e-05, + "epoch": 0.20778638662995455, + "step": 3245 + }, + { + "loss": 2.3492, + "grad_norm": 1.941513180732727, + "learning_rate": 5e-05, + "epoch": 0.20810655055388358, + "step": 3250 + }, + { + "loss": 2.3744, + "grad_norm": 1.916695237159729, + "learning_rate": 5e-05, + "epoch": 0.20842671447781264, + "step": 3255 + }, + { + "loss": 2.3438, + "grad_norm": 1.907235026359558, + "learning_rate": 5e-05, + "epoch": 0.2087468784017417, + "step": 3260 + }, + { + "loss": 2.3503, + "grad_norm": 1.864814281463623, + "learning_rate": 5e-05, + "epoch": 0.20906704232567075, + "step": 3265 + }, + { + "loss": 2.3345, + "grad_norm": 1.8227028846740723, + "learning_rate": 5e-05, + "epoch": 0.2093872062495998, + "step": 3270 + }, + { + "loss": 2.3648, + "grad_norm": 1.9340879917144775, + "learning_rate": 5e-05, + "epoch": 0.20970737017352883, + "step": 3275 + }, + { + "loss": 2.3289, + "grad_norm": 1.8352042436599731, + "learning_rate": 5e-05, + "epoch": 0.2100275340974579, + "step": 3280 + }, + { + "loss": 2.3582, + "grad_norm": 1.7997504472732544, + "learning_rate": 5e-05, + "epoch": 0.21034769802138695, + "step": 3285 + }, + { + "loss": 2.3383, + "grad_norm": 1.8354027271270752, + "learning_rate": 5e-05, + 
"epoch": 0.210667861945316, + "step": 3290 + }, + { + "loss": 2.3495, + "grad_norm": 1.9071873426437378, + "learning_rate": 5e-05, + "epoch": 0.21098802586924506, + "step": 3295 + }, + { + "loss": 2.3667, + "grad_norm": 1.8804770708084106, + "learning_rate": 5e-05, + "epoch": 0.21130818979317412, + "step": 3300 + }, + { + "loss": 2.348, + "grad_norm": 1.8979647159576416, + "learning_rate": 5e-05, + "epoch": 0.21162835371710315, + "step": 3305 + }, + { + "loss": 2.3424, + "grad_norm": 1.9298757314682007, + "learning_rate": 5e-05, + "epoch": 0.2119485176410322, + "step": 3310 + }, + { + "loss": 2.3626, + "grad_norm": 2.027535915374756, + "learning_rate": 5e-05, + "epoch": 0.21226868156496126, + "step": 3315 + }, + { + "loss": 2.3596, + "grad_norm": 1.896079421043396, + "learning_rate": 5e-05, + "epoch": 0.21258884548889032, + "step": 3320 + }, + { + "loss": 2.3383, + "grad_norm": 1.798487901687622, + "learning_rate": 5e-05, + "epoch": 0.21290900941281937, + "step": 3325 + }, + { + "loss": 2.3482, + "grad_norm": 1.9177759885787964, + "learning_rate": 5e-05, + "epoch": 0.21322917333674843, + "step": 3330 + }, + { + "loss": 2.3124, + "grad_norm": 1.9621219635009766, + "learning_rate": 5e-05, + "epoch": 0.21354933726067746, + "step": 3335 + }, + { + "loss": 2.3662, + "grad_norm": 1.9927774667739868, + "learning_rate": 5e-05, + "epoch": 0.21386950118460651, + "step": 3340 + }, + { + "loss": 2.3203, + "grad_norm": 1.8306477069854736, + "learning_rate": 5e-05, + "epoch": 0.21418966510853557, + "step": 3345 + }, + { + "loss": 2.3709, + "grad_norm": 1.922379732131958, + "learning_rate": 5e-05, + "epoch": 0.21450982903246463, + "step": 3350 + }, + { + "loss": 2.3456, + "grad_norm": 1.8414316177368164, + "learning_rate": 5e-05, + "epoch": 0.21482999295639368, + "step": 3355 + }, + { + "loss": 2.331, + "grad_norm": 1.847821831703186, + "learning_rate": 5e-05, + "epoch": 0.2151501568803227, + "step": 3360 + }, + { + "loss": 2.3428, + "grad_norm": 1.8807631731033325, + "learning_rate": 5e-05, + "epoch": 0.21547032080425177, + "step": 3365 + }, + { + "loss": 2.335, + "grad_norm": 1.9684065580368042, + "learning_rate": 5e-05, + "epoch": 0.21579048472818083, + "step": 3370 + }, + { + "loss": 2.3302, + "grad_norm": 1.9757294654846191, + "learning_rate": 5e-05, + "epoch": 0.21611064865210988, + "step": 3375 + }, + { + "loss": 2.3542, + "grad_norm": 2.087277412414551, + "learning_rate": 5e-05, + "epoch": 0.21643081257603894, + "step": 3380 + }, + { + "loss": 2.3651, + "grad_norm": 2.1057560443878174, + "learning_rate": 5e-05, + "epoch": 0.216750976499968, + "step": 3385 + }, + { + "loss": 2.3234, + "grad_norm": 1.805690884590149, + "learning_rate": 5e-05, + "epoch": 0.21707114042389702, + "step": 3390 + }, + { + "loss": 2.3306, + "grad_norm": 1.82710862159729, + "learning_rate": 5e-05, + "epoch": 0.21739130434782608, + "step": 3395 + }, + { + "loss": 2.3316, + "grad_norm": 1.8435773849487305, + "learning_rate": 5e-05, + "epoch": 0.21771146827175514, + "step": 3400 + }, + { + "eval_loss": 2.198260545730591, + "eval_runtime": 12.4233, + "eval_samples_per_second": 164.852, + "eval_steps_per_second": 20.606, + "epoch": 0.21771146827175514, + "step": 3400 + }, + { + "loss": 2.3341, + "grad_norm": 1.901654839515686, + "learning_rate": 5e-05, + "epoch": 0.2180316321956842, + "step": 3405 + }, + { + "loss": 2.3537, + "grad_norm": 1.9241116046905518, + "learning_rate": 5e-05, + "epoch": 0.21835179611961325, + "step": 3410 + }, + { + "loss": 2.3616, + "grad_norm": 1.9076368808746338, + "learning_rate": 5e-05, + "epoch": 
0.2186719600435423, + "step": 3415 + }, + { + "loss": 2.3516, + "grad_norm": 1.940434217453003, + "learning_rate": 5e-05, + "epoch": 0.21899212396747134, + "step": 3420 + }, + { + "loss": 2.3462, + "grad_norm": 1.9202423095703125, + "learning_rate": 5e-05, + "epoch": 0.2193122878914004, + "step": 3425 + }, + { + "loss": 2.3302, + "grad_norm": 1.8307172060012817, + "learning_rate": 5e-05, + "epoch": 0.21963245181532945, + "step": 3430 + }, + { + "loss": 2.3675, + "grad_norm": 1.825799584388733, + "learning_rate": 5e-05, + "epoch": 0.2199526157392585, + "step": 3435 + }, + { + "loss": 2.3283, + "grad_norm": 1.83090341091156, + "learning_rate": 5e-05, + "epoch": 0.22027277966318756, + "step": 3440 + }, + { + "loss": 2.3698, + "grad_norm": 1.8049718141555786, + "learning_rate": 5e-05, + "epoch": 0.2205929435871166, + "step": 3445 + }, + { + "loss": 2.3352, + "grad_norm": 2.047487735748291, + "learning_rate": 5e-05, + "epoch": 0.22091310751104565, + "step": 3450 + }, + { + "loss": 2.3209, + "grad_norm": 1.8851560354232788, + "learning_rate": 5e-05, + "epoch": 0.2212332714349747, + "step": 3455 + }, + { + "loss": 2.3278, + "grad_norm": 1.781719446182251, + "learning_rate": 5e-05, + "epoch": 0.22155343535890376, + "step": 3460 + }, + { + "loss": 2.3379, + "grad_norm": 1.822160243988037, + "learning_rate": 5e-05, + "epoch": 0.22187359928283282, + "step": 3465 + }, + { + "loss": 2.3341, + "grad_norm": 1.7682366371154785, + "learning_rate": 5e-05, + "epoch": 0.22219376320676187, + "step": 3470 + }, + { + "loss": 2.3617, + "grad_norm": 2.0239579677581787, + "learning_rate": 5e-05, + "epoch": 0.2225139271306909, + "step": 3475 + }, + { + "loss": 2.3376, + "grad_norm": 1.9589248895645142, + "learning_rate": 5e-05, + "epoch": 0.22283409105461996, + "step": 3480 + }, + { + "loss": 2.325, + "grad_norm": 1.8296499252319336, + "learning_rate": 5e-05, + "epoch": 0.22315425497854902, + "step": 3485 + }, + { + "loss": 2.3318, + "grad_norm": 1.7005376815795898, + "learning_rate": 5e-05, + "epoch": 0.22347441890247807, + "step": 3490 + }, + { + "loss": 2.3311, + "grad_norm": 1.8033257722854614, + "learning_rate": 5e-05, + "epoch": 0.22379458282640713, + "step": 3495 + }, + { + "loss": 2.3697, + "grad_norm": 1.813000202178955, + "learning_rate": 5e-05, + "epoch": 0.22411474675033619, + "step": 3500 + }, + { + "loss": 2.3146, + "grad_norm": 1.8140153884887695, + "learning_rate": 5e-05, + "epoch": 0.22443491067426521, + "step": 3505 + }, + { + "loss": 2.3511, + "grad_norm": 1.8404661417007446, + "learning_rate": 5e-05, + "epoch": 0.22475507459819427, + "step": 3510 + }, + { + "loss": 2.3413, + "grad_norm": 1.7392330169677734, + "learning_rate": 5e-05, + "epoch": 0.22507523852212333, + "step": 3515 + }, + { + "loss": 2.382, + "grad_norm": 1.833692193031311, + "learning_rate": 5e-05, + "epoch": 0.22539540244605238, + "step": 3520 + }, + { + "loss": 2.3425, + "grad_norm": 1.841347575187683, + "learning_rate": 5e-05, + "epoch": 0.22571556636998144, + "step": 3525 + }, + { + "loss": 2.3298, + "grad_norm": 1.789963960647583, + "learning_rate": 5e-05, + "epoch": 0.22603573029391047, + "step": 3530 + }, + { + "loss": 2.3307, + "grad_norm": 1.7983555793762207, + "learning_rate": 5e-05, + "epoch": 0.22635589421783953, + "step": 3535 + }, + { + "loss": 2.3397, + "grad_norm": 2.009568452835083, + "learning_rate": 5e-05, + "epoch": 0.22667605814176858, + "step": 3540 + }, + { + "loss": 2.314, + "grad_norm": 1.8560316562652588, + "learning_rate": 5e-05, + "epoch": 0.22699622206569764, + "step": 3545 + }, + { + "loss": 2.3296, + 
"grad_norm": 1.8345584869384766, + "learning_rate": 5e-05, + "epoch": 0.2273163859896267, + "step": 3550 + }, + { + "loss": 2.3488, + "grad_norm": 1.8176889419555664, + "learning_rate": 5e-05, + "epoch": 0.22763654991355575, + "step": 3555 + }, + { + "loss": 2.3174, + "grad_norm": 1.8083289861679077, + "learning_rate": 5e-05, + "epoch": 0.22795671383748478, + "step": 3560 + }, + { + "loss": 2.3544, + "grad_norm": 1.8767235279083252, + "learning_rate": 5e-05, + "epoch": 0.22827687776141384, + "step": 3565 + }, + { + "loss": 2.3371, + "grad_norm": 1.7118626832962036, + "learning_rate": 5e-05, + "epoch": 0.2285970416853429, + "step": 3570 + }, + { + "loss": 2.3282, + "grad_norm": 1.8422014713287354, + "learning_rate": 5e-05, + "epoch": 0.22891720560927195, + "step": 3575 + }, + { + "loss": 2.3305, + "grad_norm": 1.787657380104065, + "learning_rate": 5e-05, + "epoch": 0.229237369533201, + "step": 3580 + }, + { + "loss": 2.3791, + "grad_norm": 1.8549391031265259, + "learning_rate": 5e-05, + "epoch": 0.22955753345713006, + "step": 3585 + }, + { + "loss": 2.3312, + "grad_norm": 1.923996090888977, + "learning_rate": 5e-05, + "epoch": 0.2298776973810591, + "step": 3590 + }, + { + "loss": 2.3529, + "grad_norm": 1.9095416069030762, + "learning_rate": 5e-05, + "epoch": 0.23019786130498815, + "step": 3595 + }, + { + "loss": 2.3178, + "grad_norm": 1.8591622114181519, + "learning_rate": 5e-05, + "epoch": 0.2305180252289172, + "step": 3600 + }, + { + "eval_loss": 2.183711528778076, + "eval_runtime": 9.2433, + "eval_samples_per_second": 221.566, + "eval_steps_per_second": 27.696, + "epoch": 0.2305180252289172, + "step": 3600 + }, + { + "loss": 2.3165, + "grad_norm": 1.8757052421569824, + "learning_rate": 5e-05, + "epoch": 0.23083818915284626, + "step": 3605 + }, + { + "loss": 2.3444, + "grad_norm": 1.7773499488830566, + "learning_rate": 5e-05, + "epoch": 0.23115835307677532, + "step": 3610 + }, + { + "loss": 2.3176, + "grad_norm": 1.9299156665802002, + "learning_rate": 5e-05, + "epoch": 0.23147851700070435, + "step": 3615 + }, + { + "loss": 2.3268, + "grad_norm": 1.8555759191513062, + "learning_rate": 5e-05, + "epoch": 0.2317986809246334, + "step": 3620 + }, + { + "loss": 2.3094, + "grad_norm": 1.8463343381881714, + "learning_rate": 5e-05, + "epoch": 0.23211884484856246, + "step": 3625 + }, + { + "loss": 2.3227, + "grad_norm": 1.9021217823028564, + "learning_rate": 5e-05, + "epoch": 0.23243900877249152, + "step": 3630 + }, + { + "loss": 2.33, + "grad_norm": 1.8267807960510254, + "learning_rate": 5e-05, + "epoch": 0.23275917269642057, + "step": 3635 + }, + { + "loss": 2.3597, + "grad_norm": 1.7418160438537598, + "learning_rate": 5e-05, + "epoch": 0.23307933662034963, + "step": 3640 + }, + { + "loss": 2.3096, + "grad_norm": 1.9065451622009277, + "learning_rate": 5e-05, + "epoch": 0.23339950054427866, + "step": 3645 + }, + { + "loss": 2.3189, + "grad_norm": 1.8539282083511353, + "learning_rate": 5e-05, + "epoch": 0.23371966446820772, + "step": 3650 + }, + { + "loss": 2.3683, + "grad_norm": 1.8925061225891113, + "learning_rate": 5e-05, + "epoch": 0.23403982839213677, + "step": 3655 + }, + { + "loss": 2.3056, + "grad_norm": 1.8763203620910645, + "learning_rate": 5e-05, + "epoch": 0.23435999231606583, + "step": 3660 + }, + { + "loss": 2.3628, + "grad_norm": 1.9830697774887085, + "learning_rate": 5e-05, + "epoch": 0.23468015623999489, + "step": 3665 + }, + { + "loss": 2.3377, + "grad_norm": 1.957559585571289, + "learning_rate": 5e-05, + "epoch": 0.23500032016392394, + "step": 3670 + }, + { + "loss": 2.3105, + 
"grad_norm": 1.8156100511550903, + "learning_rate": 5e-05, + "epoch": 0.23532048408785297, + "step": 3675 + }, + { + "loss": 2.3339, + "grad_norm": 1.8064128160476685, + "learning_rate": 5e-05, + "epoch": 0.23564064801178203, + "step": 3680 + }, + { + "loss": 2.3292, + "grad_norm": 1.7643941640853882, + "learning_rate": 5e-05, + "epoch": 0.23596081193571108, + "step": 3685 + }, + { + "loss": 2.3173, + "grad_norm": 1.9001303911209106, + "learning_rate": 5e-05, + "epoch": 0.23628097585964014, + "step": 3690 + }, + { + "loss": 2.319, + "grad_norm": 1.7739763259887695, + "learning_rate": 5e-05, + "epoch": 0.2366011397835692, + "step": 3695 + }, + { + "loss": 2.3218, + "grad_norm": 1.7484267950057983, + "learning_rate": 5e-05, + "epoch": 0.23692130370749823, + "step": 3700 + }, + { + "loss": 2.3221, + "grad_norm": 1.8267314434051514, + "learning_rate": 5e-05, + "epoch": 0.23724146763142728, + "step": 3705 + }, + { + "loss": 2.3376, + "grad_norm": 1.9269976615905762, + "learning_rate": 5e-05, + "epoch": 0.23756163155535634, + "step": 3710 + }, + { + "loss": 2.3206, + "grad_norm": 1.820557951927185, + "learning_rate": 5e-05, + "epoch": 0.2378817954792854, + "step": 3715 + }, + { + "loss": 2.3379, + "grad_norm": 1.788172960281372, + "learning_rate": 5e-05, + "epoch": 0.23820195940321445, + "step": 3720 + }, + { + "loss": 2.339, + "grad_norm": 1.866925835609436, + "learning_rate": 5e-05, + "epoch": 0.2385221233271435, + "step": 3725 + }, + { + "loss": 2.3227, + "grad_norm": 1.9489960670471191, + "learning_rate": 5e-05, + "epoch": 0.23884228725107254, + "step": 3730 + }, + { + "loss": 2.3115, + "grad_norm": 1.8640096187591553, + "learning_rate": 5e-05, + "epoch": 0.2391624511750016, + "step": 3735 + }, + { + "loss": 2.3011, + "grad_norm": 1.8240951299667358, + "learning_rate": 5e-05, + "epoch": 0.23948261509893065, + "step": 3740 + }, + { + "loss": 2.3257, + "grad_norm": 1.8693873882293701, + "learning_rate": 5e-05, + "epoch": 0.2398027790228597, + "step": 3745 + }, + { + "loss": 2.3027, + "grad_norm": 1.879884958267212, + "learning_rate": 5e-05, + "epoch": 0.24012294294678876, + "step": 3750 + }, + { + "loss": 2.3408, + "grad_norm": 1.8470027446746826, + "learning_rate": 5e-05, + "epoch": 0.24044310687071782, + "step": 3755 + }, + { + "loss": 2.2935, + "grad_norm": 1.8507801294326782, + "learning_rate": 5e-05, + "epoch": 0.24076327079464685, + "step": 3760 + }, + { + "loss": 2.3283, + "grad_norm": 1.8579989671707153, + "learning_rate": 5e-05, + "epoch": 0.2410834347185759, + "step": 3765 + }, + { + "loss": 2.3095, + "grad_norm": 1.8650803565979004, + "learning_rate": 5e-05, + "epoch": 0.24140359864250496, + "step": 3770 + }, + { + "loss": 2.3032, + "grad_norm": 1.8193062543869019, + "learning_rate": 5e-05, + "epoch": 0.24172376256643402, + "step": 3775 + }, + { + "loss": 2.3259, + "grad_norm": 2.0027434825897217, + "learning_rate": 5e-05, + "epoch": 0.24204392649036308, + "step": 3780 + }, + { + "loss": 2.3105, + "grad_norm": 1.9422210454940796, + "learning_rate": 5e-05, + "epoch": 0.2423640904142921, + "step": 3785 + }, + { + "loss": 2.3438, + "grad_norm": 1.9346174001693726, + "learning_rate": 5e-05, + "epoch": 0.24268425433822116, + "step": 3790 + }, + { + "loss": 2.317, + "grad_norm": 1.8896723985671997, + "learning_rate": 5e-05, + "epoch": 0.24300441826215022, + "step": 3795 + }, + { + "loss": 2.3141, + "grad_norm": 1.8078259229660034, + "learning_rate": 5e-05, + "epoch": 0.24332458218607927, + "step": 3800 + }, + { + "eval_loss": 2.184438705444336, + "eval_runtime": 10.614, + 
"eval_samples_per_second": 192.952, + "eval_steps_per_second": 24.119, + "epoch": 0.24332458218607927, + "step": 3800 + }, + { + "loss": 2.3237, + "grad_norm": 1.9326097965240479, + "learning_rate": 5e-05, + "epoch": 0.24364474611000833, + "step": 3805 + }, + { + "loss": 2.3287, + "grad_norm": 1.8264923095703125, + "learning_rate": 5e-05, + "epoch": 0.2439649100339374, + "step": 3810 + }, + { + "loss": 2.3117, + "grad_norm": 1.9702720642089844, + "learning_rate": 5e-05, + "epoch": 0.24428507395786642, + "step": 3815 + }, + { + "loss": 2.306, + "grad_norm": 1.810170292854309, + "learning_rate": 5e-05, + "epoch": 0.24460523788179547, + "step": 3820 + }, + { + "loss": 2.3408, + "grad_norm": 1.8189213275909424, + "learning_rate": 5e-05, + "epoch": 0.24492540180572453, + "step": 3825 + }, + { + "loss": 2.3131, + "grad_norm": 1.8908005952835083, + "learning_rate": 5e-05, + "epoch": 0.2452455657296536, + "step": 3830 + }, + { + "loss": 2.339, + "grad_norm": 1.8011490106582642, + "learning_rate": 5e-05, + "epoch": 0.24556572965358264, + "step": 3835 + }, + { + "loss": 2.3175, + "grad_norm": 1.7648205757141113, + "learning_rate": 5e-05, + "epoch": 0.2458858935775117, + "step": 3840 + }, + { + "loss": 2.325, + "grad_norm": 1.8377348184585571, + "learning_rate": 5e-05, + "epoch": 0.24620605750144073, + "step": 3845 + }, + { + "loss": 2.2859, + "grad_norm": 1.8196038007736206, + "learning_rate": 5e-05, + "epoch": 0.24652622142536978, + "step": 3850 + }, + { + "loss": 2.331, + "grad_norm": 1.7730222940444946, + "learning_rate": 5e-05, + "epoch": 0.24684638534929884, + "step": 3855 + }, + { + "loss": 2.3507, + "grad_norm": 1.8816814422607422, + "learning_rate": 5e-05, + "epoch": 0.2471665492732279, + "step": 3860 + }, + { + "loss": 2.3348, + "grad_norm": 1.842856526374817, + "learning_rate": 5e-05, + "epoch": 0.24748671319715695, + "step": 3865 + }, + { + "loss": 2.3169, + "grad_norm": 1.792005181312561, + "learning_rate": 5e-05, + "epoch": 0.24780687712108598, + "step": 3870 + }, + { + "loss": 2.3106, + "grad_norm": 1.7900562286376953, + "learning_rate": 5e-05, + "epoch": 0.24812704104501504, + "step": 3875 + }, + { + "loss": 2.3323, + "grad_norm": 1.7834545373916626, + "learning_rate": 5e-05, + "epoch": 0.2484472049689441, + "step": 3880 + }, + { + "loss": 2.3434, + "grad_norm": 1.8184958696365356, + "learning_rate": 5e-05, + "epoch": 0.24876736889287315, + "step": 3885 + }, + { + "loss": 2.3054, + "grad_norm": 1.8056672811508179, + "learning_rate": 5e-05, + "epoch": 0.2490875328168022, + "step": 3890 + }, + { + "loss": 2.3238, + "grad_norm": 1.8434021472930908, + "learning_rate": 5e-05, + "epoch": 0.24940769674073127, + "step": 3895 + }, + { + "loss": 2.3059, + "grad_norm": 1.7343634366989136, + "learning_rate": 5e-05, + "epoch": 0.2497278606646603, + "step": 3900 + }, + { + "loss": 2.325, + "grad_norm": 1.7279424667358398, + "learning_rate": 5e-05, + "epoch": 0.2500480245885894, + "step": 3905 + }, + { + "loss": 2.323, + "grad_norm": 1.7715774774551392, + "learning_rate": 5e-05, + "epoch": 0.2503681885125184, + "step": 3910 + }, + { + "loss": 2.2872, + "grad_norm": 1.7786765098571777, + "learning_rate": 5e-05, + "epoch": 0.25068835243644744, + "step": 3915 + }, + { + "loss": 2.3408, + "grad_norm": 1.8894507884979248, + "learning_rate": 5e-05, + "epoch": 0.2510085163603765, + "step": 3920 + }, + { + "loss": 2.3193, + "grad_norm": 1.8058632612228394, + "learning_rate": 5e-05, + "epoch": 0.25132868028430555, + "step": 3925 + }, + { + "loss": 2.3168, + "grad_norm": 1.7818254232406616, + "learning_rate": 
5e-05, + "epoch": 0.25164884420823463, + "step": 3930 + }, + { + "loss": 2.3393, + "grad_norm": 1.7840033769607544, + "learning_rate": 5e-05, + "epoch": 0.25196900813216366, + "step": 3935 + }, + { + "loss": 2.3405, + "grad_norm": 1.8498218059539795, + "learning_rate": 5e-05, + "epoch": 0.2522891720560927, + "step": 3940 + }, + { + "loss": 2.3186, + "grad_norm": 1.827964425086975, + "learning_rate": 5e-05, + "epoch": 0.2526093359800218, + "step": 3945 + }, + { + "loss": 2.3069, + "grad_norm": 1.8498241901397705, + "learning_rate": 5e-05, + "epoch": 0.2529294999039508, + "step": 3950 + }, + { + "loss": 2.3419, + "grad_norm": 1.7726775407791138, + "learning_rate": 5e-05, + "epoch": 0.2532496638278799, + "step": 3955 + }, + { + "loss": 2.3047, + "grad_norm": 1.9088823795318604, + "learning_rate": 5e-05, + "epoch": 0.2535698277518089, + "step": 3960 + }, + { + "loss": 2.334, + "grad_norm": 1.8803976774215698, + "learning_rate": 5e-05, + "epoch": 0.253889991675738, + "step": 3965 + }, + { + "loss": 2.2902, + "grad_norm": 1.914686679840088, + "learning_rate": 5e-05, + "epoch": 0.25421015559966703, + "step": 3970 + }, + { + "loss": 2.3233, + "grad_norm": 1.8192038536071777, + "learning_rate": 5e-05, + "epoch": 0.25453031952359606, + "step": 3975 + }, + { + "loss": 2.3367, + "grad_norm": 1.8976246118545532, + "learning_rate": 5e-05, + "epoch": 0.25485048344752514, + "step": 3980 + }, + { + "loss": 2.2671, + "grad_norm": 1.7621504068374634, + "learning_rate": 5e-05, + "epoch": 0.2551706473714542, + "step": 3985 + }, + { + "loss": 2.3326, + "grad_norm": 1.912398338317871, + "learning_rate": 5e-05, + "epoch": 0.25549081129538326, + "step": 3990 + }, + { + "loss": 2.3008, + "grad_norm": 2.027517557144165, + "learning_rate": 5e-05, + "epoch": 0.2558109752193123, + "step": 3995 + }, + { + "loss": 2.3146, + "grad_norm": 1.934006690979004, + "learning_rate": 5e-05, + "epoch": 0.2561311391432413, + "step": 4000 + }, + { + "eval_loss": 2.177450180053711, + "eval_runtime": 9.6248, + "eval_samples_per_second": 212.784, + "eval_steps_per_second": 26.598, + "epoch": 0.2561311391432413, + "step": 4000 + }, + { + "loss": 2.2979, + "grad_norm": 1.8951339721679688, + "learning_rate": 5e-05, + "epoch": 0.2564513030671704, + "step": 4005 + }, + { + "loss": 2.3171, + "grad_norm": 1.7967989444732666, + "learning_rate": 5e-05, + "epoch": 0.25677146699109943, + "step": 4010 + }, + { + "loss": 2.3077, + "grad_norm": 1.878688097000122, + "learning_rate": 5e-05, + "epoch": 0.2570916309150285, + "step": 4015 + }, + { + "loss": 2.2935, + "grad_norm": 1.855326771736145, + "learning_rate": 5e-05, + "epoch": 0.25741179483895754, + "step": 4020 + }, + { + "loss": 2.3404, + "grad_norm": 1.797782063484192, + "learning_rate": 5e-05, + "epoch": 0.25773195876288657, + "step": 4025 + }, + { + "loss": 2.2913, + "grad_norm": 1.9316190481185913, + "learning_rate": 5e-05, + "epoch": 0.25805212268681565, + "step": 4030 + }, + { + "loss": 2.306, + "grad_norm": 1.7924227714538574, + "learning_rate": 5e-05, + "epoch": 0.2583722866107447, + "step": 4035 + }, + { + "loss": 2.3185, + "grad_norm": 1.8619980812072754, + "learning_rate": 5e-05, + "epoch": 0.25869245053467377, + "step": 4040 + }, + { + "loss": 2.3279, + "grad_norm": 1.931126594543457, + "learning_rate": 5e-05, + "epoch": 0.2590126144586028, + "step": 4045 + }, + { + "loss": 2.3288, + "grad_norm": 1.8355220556259155, + "learning_rate": 5e-05, + "epoch": 0.2593327783825319, + "step": 4050 + }, + { + "loss": 2.3015, + "grad_norm": 1.8821378946304321, + "learning_rate": 5e-05, + "epoch": 
0.2596529423064609, + "step": 4055 + }, + { + "loss": 2.3181, + "grad_norm": 1.929376482963562, + "learning_rate": 5e-05, + "epoch": 0.25997310623038994, + "step": 4060 + }, + { + "loss": 2.3188, + "grad_norm": 1.8176177740097046, + "learning_rate": 5e-05, + "epoch": 0.260293270154319, + "step": 4065 + }, + { + "loss": 2.3164, + "grad_norm": 1.7493705749511719, + "learning_rate": 5e-05, + "epoch": 0.26061343407824805, + "step": 4070 + }, + { + "loss": 2.2848, + "grad_norm": 1.8640022277832031, + "learning_rate": 5e-05, + "epoch": 0.26093359800217714, + "step": 4075 + }, + { + "loss": 2.3192, + "grad_norm": 1.8012224435806274, + "learning_rate": 5e-05, + "epoch": 0.26125376192610616, + "step": 4080 + }, + { + "loss": 2.3191, + "grad_norm": 1.9000436067581177, + "learning_rate": 5e-05, + "epoch": 0.2615739258500352, + "step": 4085 + }, + { + "loss": 2.3032, + "grad_norm": 1.8532963991165161, + "learning_rate": 5e-05, + "epoch": 0.2618940897739643, + "step": 4090 + }, + { + "loss": 2.326, + "grad_norm": 1.8395898342132568, + "learning_rate": 5e-05, + "epoch": 0.2622142536978933, + "step": 4095 + }, + { + "loss": 2.3354, + "grad_norm": 1.7998299598693848, + "learning_rate": 5e-05, + "epoch": 0.2625344176218224, + "step": 4100 + }, + { + "loss": 2.3093, + "grad_norm": 1.8509643077850342, + "learning_rate": 5e-05, + "epoch": 0.2628545815457514, + "step": 4105 + }, + { + "loss": 2.3287, + "grad_norm": 1.9016660451889038, + "learning_rate": 5e-05, + "epoch": 0.26317474546968045, + "step": 4110 + }, + { + "loss": 2.2871, + "grad_norm": 1.8604185581207275, + "learning_rate": 5e-05, + "epoch": 0.26349490939360953, + "step": 4115 + }, + { + "loss": 2.3072, + "grad_norm": 1.842264175415039, + "learning_rate": 5e-05, + "epoch": 0.26381507331753856, + "step": 4120 + }, + { + "loss": 2.3312, + "grad_norm": 1.7420934438705444, + "learning_rate": 5e-05, + "epoch": 0.26413523724146765, + "step": 4125 + }, + { + "loss": 2.3131, + "grad_norm": 1.7676818370819092, + "learning_rate": 5e-05, + "epoch": 0.2644554011653967, + "step": 4130 + }, + { + "loss": 2.3026, + "grad_norm": 1.7885444164276123, + "learning_rate": 5e-05, + "epoch": 0.26477556508932576, + "step": 4135 + }, + { + "loss": 2.3201, + "grad_norm": 1.6721593141555786, + "learning_rate": 5e-05, + "epoch": 0.2650957290132548, + "step": 4140 + }, + { + "loss": 2.3047, + "grad_norm": 1.8331459760665894, + "learning_rate": 5e-05, + "epoch": 0.2654158929371838, + "step": 4145 + }, + { + "loss": 2.3259, + "grad_norm": 1.735121250152588, + "learning_rate": 5e-05, + "epoch": 0.2657360568611129, + "step": 4150 + }, + { + "loss": 2.2825, + "grad_norm": 1.8103950023651123, + "learning_rate": 5e-05, + "epoch": 0.26605622078504193, + "step": 4155 + }, + { + "loss": 2.3094, + "grad_norm": 1.833533525466919, + "learning_rate": 5e-05, + "epoch": 0.266376384708971, + "step": 4160 + }, + { + "loss": 2.3258, + "grad_norm": 1.7850996255874634, + "learning_rate": 5e-05, + "epoch": 0.26669654863290004, + "step": 4165 + }, + { + "loss": 2.314, + "grad_norm": 1.8073853254318237, + "learning_rate": 5e-05, + "epoch": 0.2670167125568291, + "step": 4170 + }, + { + "loss": 2.33, + "grad_norm": 1.7849806547164917, + "learning_rate": 5e-05, + "epoch": 0.26733687648075816, + "step": 4175 + }, + { + "loss": 2.3151, + "grad_norm": 1.7261276245117188, + "learning_rate": 5e-05, + "epoch": 0.2676570404046872, + "step": 4180 + }, + { + "loss": 2.3036, + "grad_norm": 1.763243556022644, + "learning_rate": 5e-05, + "epoch": 0.26797720432861627, + "step": 4185 + }, + { + "loss": 2.3306, + 
"grad_norm": 1.8771343231201172, + "learning_rate": 5e-05, + "epoch": 0.2682973682525453, + "step": 4190 + }, + { + "loss": 2.3093, + "grad_norm": 1.8824447393417358, + "learning_rate": 5e-05, + "epoch": 0.2686175321764743, + "step": 4195 + }, + { + "loss": 2.3243, + "grad_norm": 1.757983684539795, + "learning_rate": 5e-05, + "epoch": 0.2689376961004034, + "step": 4200 + }, + { + "eval_loss": 2.162459135055542, + "eval_runtime": 12.641, + "eval_samples_per_second": 162.012, + "eval_steps_per_second": 20.252, + "epoch": 0.2689376961004034, + "step": 4200 + }, + { + "loss": 2.3164, + "grad_norm": 1.7963429689407349, + "learning_rate": 5e-05, + "epoch": 0.26925786002433244, + "step": 4205 + }, + { + "loss": 2.3378, + "grad_norm": 1.9342796802520752, + "learning_rate": 5e-05, + "epoch": 0.2695780239482615, + "step": 4210 + }, + { + "loss": 2.3121, + "grad_norm": 1.863183856010437, + "learning_rate": 5e-05, + "epoch": 0.26989818787219055, + "step": 4215 + }, + { + "loss": 2.2946, + "grad_norm": 1.7715400457382202, + "learning_rate": 5e-05, + "epoch": 0.27021835179611964, + "step": 4220 + }, + { + "loss": 2.2986, + "grad_norm": 1.806707739830017, + "learning_rate": 5e-05, + "epoch": 0.27053851572004867, + "step": 4225 + }, + { + "loss": 2.3095, + "grad_norm": 1.8026810884475708, + "learning_rate": 5e-05, + "epoch": 0.2708586796439777, + "step": 4230 + }, + { + "loss": 2.3165, + "grad_norm": 1.7845863103866577, + "learning_rate": 5e-05, + "epoch": 0.2711788435679068, + "step": 4235 + }, + { + "loss": 2.3014, + "grad_norm": 1.864893913269043, + "learning_rate": 5e-05, + "epoch": 0.2714990074918358, + "step": 4240 + }, + { + "loss": 2.3238, + "grad_norm": 1.7305742502212524, + "learning_rate": 5e-05, + "epoch": 0.2718191714157649, + "step": 4245 + }, + { + "loss": 2.291, + "grad_norm": 1.7948355674743652, + "learning_rate": 5e-05, + "epoch": 0.2721393353396939, + "step": 4250 + }, + { + "loss": 2.3144, + "grad_norm": 1.7819257974624634, + "learning_rate": 5e-05, + "epoch": 0.27245949926362295, + "step": 4255 + }, + { + "loss": 2.327, + "grad_norm": 1.8523086309432983, + "learning_rate": 5e-05, + "epoch": 0.27277966318755204, + "step": 4260 + }, + { + "loss": 2.3293, + "grad_norm": 1.7722643613815308, + "learning_rate": 5e-05, + "epoch": 0.27309982711148106, + "step": 4265 + }, + { + "loss": 2.303, + "grad_norm": 1.8291378021240234, + "learning_rate": 5e-05, + "epoch": 0.27341999103541015, + "step": 4270 + }, + { + "loss": 2.2893, + "grad_norm": 1.8277583122253418, + "learning_rate": 5e-05, + "epoch": 0.2737401549593392, + "step": 4275 + }, + { + "loss": 2.3348, + "grad_norm": 1.8024441003799438, + "learning_rate": 5e-05, + "epoch": 0.2740603188832682, + "step": 4280 + }, + { + "loss": 2.3024, + "grad_norm": 1.8651007413864136, + "learning_rate": 5e-05, + "epoch": 0.2743804828071973, + "step": 4285 + }, + { + "loss": 2.327, + "grad_norm": 1.8744381666183472, + "learning_rate": 5e-05, + "epoch": 0.2747006467311263, + "step": 4290 + }, + { + "loss": 2.3225, + "grad_norm": 1.8396573066711426, + "learning_rate": 5e-05, + "epoch": 0.2750208106550554, + "step": 4295 + }, + { + "loss": 2.2918, + "grad_norm": 1.7585549354553223, + "learning_rate": 5e-05, + "epoch": 0.27534097457898443, + "step": 4300 + }, + { + "loss": 2.3135, + "grad_norm": 1.8332717418670654, + "learning_rate": 5e-05, + "epoch": 0.2756611385029135, + "step": 4305 + }, + { + "loss": 2.3176, + "grad_norm": 1.8986752033233643, + "learning_rate": 5e-05, + "epoch": 0.27598130242684255, + "step": 4310 + }, + { + "loss": 2.2758, + "grad_norm": 
1.7375805377960205, + "learning_rate": 5e-05, + "epoch": 0.2763014663507716, + "step": 4315 + }, + { + "loss": 2.2786, + "grad_norm": 1.838408350944519, + "learning_rate": 5e-05, + "epoch": 0.27662163027470066, + "step": 4320 + }, + { + "loss": 2.3119, + "grad_norm": 1.9200383424758911, + "learning_rate": 5e-05, + "epoch": 0.2769417941986297, + "step": 4325 + }, + { + "loss": 2.3082, + "grad_norm": 1.8573769330978394, + "learning_rate": 5e-05, + "epoch": 0.27726195812255877, + "step": 4330 + }, + { + "loss": 2.2946, + "grad_norm": 1.819273829460144, + "learning_rate": 5e-05, + "epoch": 0.2775821220464878, + "step": 4335 + }, + { + "loss": 2.28, + "grad_norm": 1.8978952169418335, + "learning_rate": 5e-05, + "epoch": 0.27790228597041683, + "step": 4340 + }, + { + "loss": 2.3058, + "grad_norm": 1.7782193422317505, + "learning_rate": 5e-05, + "epoch": 0.2782224498943459, + "step": 4345 + }, + { + "loss": 2.3189, + "grad_norm": 1.831231951713562, + "learning_rate": 5e-05, + "epoch": 0.27854261381827494, + "step": 4350 + }, + { + "loss": 2.314, + "grad_norm": 1.830064296722412, + "learning_rate": 5e-05, + "epoch": 0.278862777742204, + "step": 4355 + }, + { + "loss": 2.2983, + "grad_norm": 1.8492834568023682, + "learning_rate": 5e-05, + "epoch": 0.27918294166613306, + "step": 4360 + }, + { + "loss": 2.3379, + "grad_norm": 1.841322898864746, + "learning_rate": 5e-05, + "epoch": 0.2795031055900621, + "step": 4365 + }, + { + "loss": 2.3309, + "grad_norm": 1.8109886646270752, + "learning_rate": 5e-05, + "epoch": 0.27982326951399117, + "step": 4370 + }, + { + "loss": 2.2967, + "grad_norm": 1.9388337135314941, + "learning_rate": 5e-05, + "epoch": 0.2801434334379202, + "step": 4375 + }, + { + "loss": 2.3163, + "grad_norm": 1.8596948385238647, + "learning_rate": 5e-05, + "epoch": 0.2804635973618493, + "step": 4380 + }, + { + "loss": 2.2899, + "grad_norm": 1.8299187421798706, + "learning_rate": 5e-05, + "epoch": 0.2807837612857783, + "step": 4385 + }, + { + "loss": 2.3007, + "grad_norm": 1.819220781326294, + "learning_rate": 5e-05, + "epoch": 0.2811039252097074, + "step": 4390 + }, + { + "loss": 2.312, + "grad_norm": 1.8518681526184082, + "learning_rate": 5e-05, + "epoch": 0.2814240891336364, + "step": 4395 + }, + { + "loss": 2.3215, + "grad_norm": 1.8841506242752075, + "learning_rate": 5e-05, + "epoch": 0.28174425305756545, + "step": 4400 + }, + { + "eval_loss": 2.1684622764587402, + "eval_runtime": 9.6418, + "eval_samples_per_second": 212.408, + "eval_steps_per_second": 26.551, + "epoch": 0.28174425305756545, + "step": 4400 + }, + { + "loss": 2.2986, + "grad_norm": 1.71231210231781, + "learning_rate": 5e-05, + "epoch": 0.28206441698149454, + "step": 4405 + }, + { + "loss": 2.2829, + "grad_norm": 1.7305104732513428, + "learning_rate": 5e-05, + "epoch": 0.28238458090542357, + "step": 4410 + }, + { + "loss": 2.2749, + "grad_norm": 1.8442025184631348, + "learning_rate": 5e-05, + "epoch": 0.28270474482935265, + "step": 4415 + }, + { + "loss": 2.3002, + "grad_norm": 1.8370575904846191, + "learning_rate": 5e-05, + "epoch": 0.2830249087532817, + "step": 4420 + }, + { + "loss": 2.2997, + "grad_norm": 1.8042954206466675, + "learning_rate": 5e-05, + "epoch": 0.2833450726772107, + "step": 4425 + }, + { + "loss": 2.3224, + "grad_norm": 1.7841765880584717, + "learning_rate": 5e-05, + "epoch": 0.2836652366011398, + "step": 4430 + }, + { + "loss": 2.3205, + "grad_norm": 1.6575603485107422, + "learning_rate": 5e-05, + "epoch": 0.2839854005250688, + "step": 4435 + }, + { + "loss": 2.301, + "grad_norm": 
1.8698128461837769, + "learning_rate": 5e-05, + "epoch": 0.2843055644489979, + "step": 4440 + }, + { + "loss": 2.3192, + "grad_norm": 1.8466641902923584, + "learning_rate": 5e-05, + "epoch": 0.28462572837292693, + "step": 4445 + }, + { + "loss": 2.2704, + "grad_norm": 1.7934186458587646, + "learning_rate": 5e-05, + "epoch": 0.28494589229685596, + "step": 4450 + }, + { + "loss": 2.3225, + "grad_norm": 1.770643949508667, + "learning_rate": 5e-05, + "epoch": 0.28526605622078505, + "step": 4455 + }, + { + "loss": 2.3238, + "grad_norm": 1.7914665937423706, + "learning_rate": 5e-05, + "epoch": 0.2855862201447141, + "step": 4460 + }, + { + "loss": 2.3193, + "grad_norm": 1.7819799184799194, + "learning_rate": 5e-05, + "epoch": 0.28590638406864316, + "step": 4465 + }, + { + "loss": 2.2642, + "grad_norm": 1.7854515314102173, + "learning_rate": 5e-05, + "epoch": 0.2862265479925722, + "step": 4470 + }, + { + "loss": 2.3097, + "grad_norm": 1.703332543373108, + "learning_rate": 5e-05, + "epoch": 0.2865467119165013, + "step": 4475 + }, + { + "loss": 2.3122, + "grad_norm": 1.7654129266738892, + "learning_rate": 5e-05, + "epoch": 0.2868668758404303, + "step": 4480 + }, + { + "loss": 2.3142, + "grad_norm": 1.8920791149139404, + "learning_rate": 5e-05, + "epoch": 0.28718703976435933, + "step": 4485 + }, + { + "loss": 2.3208, + "grad_norm": 1.824573278427124, + "learning_rate": 5e-05, + "epoch": 0.2875072036882884, + "step": 4490 + }, + { + "loss": 2.3027, + "grad_norm": 1.7249481678009033, + "learning_rate": 5e-05, + "epoch": 0.28782736761221744, + "step": 4495 + }, + { + "loss": 2.2931, + "grad_norm": 1.8988478183746338, + "learning_rate": 5e-05, + "epoch": 0.28814753153614653, + "step": 4500 + }, + { + "loss": 2.3057, + "grad_norm": 1.8929831981658936, + "learning_rate": 5e-05, + "epoch": 0.28846769546007556, + "step": 4505 + }, + { + "loss": 2.313, + "grad_norm": 1.8110840320587158, + "learning_rate": 5e-05, + "epoch": 0.2887878593840046, + "step": 4510 + }, + { + "loss": 2.2986, + "grad_norm": 1.7756503820419312, + "learning_rate": 5e-05, + "epoch": 0.28910802330793367, + "step": 4515 + }, + { + "loss": 2.2993, + "grad_norm": 1.8048218488693237, + "learning_rate": 5e-05, + "epoch": 0.2894281872318627, + "step": 4520 + }, + { + "loss": 2.283, + "grad_norm": 1.7407152652740479, + "learning_rate": 5e-05, + "epoch": 0.2897483511557918, + "step": 4525 + }, + { + "loss": 2.3091, + "grad_norm": 1.909650206565857, + "learning_rate": 5e-05, + "epoch": 0.2900685150797208, + "step": 4530 + }, + { + "loss": 2.3102, + "grad_norm": 1.803214430809021, + "learning_rate": 5e-05, + "epoch": 0.29038867900364984, + "step": 4535 + }, + { + "loss": 2.2942, + "grad_norm": 1.9347703456878662, + "learning_rate": 5e-05, + "epoch": 0.2907088429275789, + "step": 4540 + }, + { + "loss": 2.2967, + "grad_norm": 1.8654407262802124, + "learning_rate": 5e-05, + "epoch": 0.29102900685150795, + "step": 4545 + }, + { + "loss": 2.3208, + "grad_norm": 1.7373706102371216, + "learning_rate": 5e-05, + "epoch": 0.29134917077543704, + "step": 4550 + }, + { + "loss": 2.3188, + "grad_norm": 1.7621229887008667, + "learning_rate": 5e-05, + "epoch": 0.29166933469936607, + "step": 4555 + }, + { + "loss": 2.2971, + "grad_norm": 1.7987569570541382, + "learning_rate": 5e-05, + "epoch": 0.29198949862329515, + "step": 4560 + }, + { + "loss": 2.2926, + "grad_norm": 1.8752938508987427, + "learning_rate": 5e-05, + "epoch": 0.2923096625472242, + "step": 4565 + }, + { + "loss": 2.294, + "grad_norm": 1.809169888496399, + "learning_rate": 5e-05, + "epoch": 
0.2926298264711532, + "step": 4570 + }, + { + "loss": 2.3059, + "grad_norm": 1.8496021032333374, + "learning_rate": 5e-05, + "epoch": 0.2929499903950823, + "step": 4575 + }, + { + "loss": 2.2714, + "grad_norm": 1.8275306224822998, + "learning_rate": 5e-05, + "epoch": 0.2932701543190113, + "step": 4580 + }, + { + "loss": 2.2797, + "grad_norm": 1.8231137990951538, + "learning_rate": 5e-05, + "epoch": 0.2935903182429404, + "step": 4585 + }, + { + "loss": 2.2901, + "grad_norm": 1.7881653308868408, + "learning_rate": 5e-05, + "epoch": 0.29391048216686944, + "step": 4590 + }, + { + "loss": 2.3043, + "grad_norm": 1.9115880727767944, + "learning_rate": 5e-05, + "epoch": 0.29423064609079846, + "step": 4595 + }, + { + "loss": 2.3292, + "grad_norm": 1.8070696592330933, + "learning_rate": 5e-05, + "epoch": 0.29455081001472755, + "step": 4600 + }, + { + "eval_loss": 2.1569859981536865, + "eval_runtime": 9.5207, + "eval_samples_per_second": 215.111, + "eval_steps_per_second": 26.889, + "epoch": 0.29455081001472755, + "step": 4600 + }, + { + "loss": 2.32, + "grad_norm": 1.7979247570037842, + "learning_rate": 5e-05, + "epoch": 0.2948709739386566, + "step": 4605 + }, + { + "loss": 2.2949, + "grad_norm": 1.7743096351623535, + "learning_rate": 5e-05, + "epoch": 0.29519113786258566, + "step": 4610 + }, + { + "loss": 2.2916, + "grad_norm": 1.7690064907073975, + "learning_rate": 5e-05, + "epoch": 0.2955113017865147, + "step": 4615 + }, + { + "loss": 2.3084, + "grad_norm": 1.9324722290039062, + "learning_rate": 5e-05, + "epoch": 0.2958314657104437, + "step": 4620 + }, + { + "loss": 2.2975, + "grad_norm": 1.7818751335144043, + "learning_rate": 5e-05, + "epoch": 0.2961516296343728, + "step": 4625 + }, + { + "loss": 2.2689, + "grad_norm": 1.7577718496322632, + "learning_rate": 5e-05, + "epoch": 0.29647179355830183, + "step": 4630 + }, + { + "loss": 2.2863, + "grad_norm": 1.7863922119140625, + "learning_rate": 5e-05, + "epoch": 0.2967919574822309, + "step": 4635 + }, + { + "loss": 2.2954, + "grad_norm": 1.8004027605056763, + "learning_rate": 5e-05, + "epoch": 0.29711212140615995, + "step": 4640 + }, + { + "loss": 2.3267, + "grad_norm": 1.7635235786437988, + "learning_rate": 5e-05, + "epoch": 0.29743228533008903, + "step": 4645 + }, + { + "loss": 2.3047, + "grad_norm": 1.814304232597351, + "learning_rate": 5e-05, + "epoch": 0.29775244925401806, + "step": 4650 + }, + { + "loss": 2.2948, + "grad_norm": 1.8464570045471191, + "learning_rate": 5e-05, + "epoch": 0.2980726131779471, + "step": 4655 + }, + { + "loss": 2.2944, + "grad_norm": 1.7262645959854126, + "learning_rate": 5e-05, + "epoch": 0.29839277710187617, + "step": 4660 + }, + { + "loss": 2.3012, + "grad_norm": 1.7750794887542725, + "learning_rate": 5e-05, + "epoch": 0.2987129410258052, + "step": 4665 + }, + { + "loss": 2.2965, + "grad_norm": 1.7386796474456787, + "learning_rate": 5e-05, + "epoch": 0.2990331049497343, + "step": 4670 + }, + { + "loss": 2.2986, + "grad_norm": 1.8167015314102173, + "learning_rate": 5e-05, + "epoch": 0.2993532688736633, + "step": 4675 + }, + { + "loss": 2.2928, + "grad_norm": 1.7372899055480957, + "learning_rate": 5e-05, + "epoch": 0.29967343279759234, + "step": 4680 + }, + { + "loss": 2.2767, + "grad_norm": 1.793278694152832, + "learning_rate": 5e-05, + "epoch": 0.2999935967215214, + "step": 4685 + }, + { + "loss": 2.2887, + "grad_norm": 1.93364679813385, + "learning_rate": 5e-05, + "epoch": 0.30031376064545046, + "step": 4690 + }, + { + "loss": 2.3015, + "grad_norm": 1.7167513370513916, + "learning_rate": 5e-05, + "epoch": 
0.30063392456937954, + "step": 4695 + }, + { + "loss": 2.2635, + "grad_norm": 1.7310161590576172, + "learning_rate": 5e-05, + "epoch": 0.30095408849330857, + "step": 4700 + }, + { + "loss": 2.2942, + "grad_norm": 1.7478691339492798, + "learning_rate": 5e-05, + "epoch": 0.3012742524172376, + "step": 4705 + }, + { + "loss": 2.2552, + "grad_norm": 1.8167970180511475, + "learning_rate": 5e-05, + "epoch": 0.3015944163411667, + "step": 4710 + }, + { + "loss": 2.2812, + "grad_norm": 1.6952241659164429, + "learning_rate": 5e-05, + "epoch": 0.3019145802650957, + "step": 4715 + }, + { + "loss": 2.2908, + "grad_norm": 1.7196714878082275, + "learning_rate": 5e-05, + "epoch": 0.3022347441890248, + "step": 4720 + }, + { + "loss": 2.2867, + "grad_norm": 1.7747132778167725, + "learning_rate": 5e-05, + "epoch": 0.3025549081129538, + "step": 4725 + }, + { + "loss": 2.2695, + "grad_norm": 1.8552742004394531, + "learning_rate": 5e-05, + "epoch": 0.3028750720368829, + "step": 4730 + }, + { + "loss": 2.264, + "grad_norm": 1.719187617301941, + "learning_rate": 5e-05, + "epoch": 0.30319523596081194, + "step": 4735 + }, + { + "loss": 2.2794, + "grad_norm": 1.8311821222305298, + "learning_rate": 5e-05, + "epoch": 0.30351539988474097, + "step": 4740 + }, + { + "loss": 2.2649, + "grad_norm": 1.7115150690078735, + "learning_rate": 5e-05, + "epoch": 0.30383556380867005, + "step": 4745 + }, + { + "loss": 2.299, + "grad_norm": 1.7218992710113525, + "learning_rate": 5e-05, + "epoch": 0.3041557277325991, + "step": 4750 + }, + { + "loss": 2.3031, + "grad_norm": 1.7692986726760864, + "learning_rate": 5e-05, + "epoch": 0.30447589165652816, + "step": 4755 + }, + { + "loss": 2.3133, + "grad_norm": 1.7613261938095093, + "learning_rate": 5e-05, + "epoch": 0.3047960555804572, + "step": 4760 + }, + { + "loss": 2.3096, + "grad_norm": 1.762600064277649, + "learning_rate": 5e-05, + "epoch": 0.3051162195043862, + "step": 4765 + }, + { + "loss": 2.2789, + "grad_norm": 1.7768152952194214, + "learning_rate": 5e-05, + "epoch": 0.3054363834283153, + "step": 4770 + }, + { + "loss": 2.3094, + "grad_norm": 1.8207039833068848, + "learning_rate": 5e-05, + "epoch": 0.30575654735224433, + "step": 4775 + }, + { + "loss": 2.2842, + "grad_norm": 1.7720569372177124, + "learning_rate": 5e-05, + "epoch": 0.3060767112761734, + "step": 4780 + }, + { + "loss": 2.3167, + "grad_norm": 1.7781318426132202, + "learning_rate": 5e-05, + "epoch": 0.30639687520010245, + "step": 4785 + }, + { + "loss": 2.2924, + "grad_norm": 1.797167181968689, + "learning_rate": 5e-05, + "epoch": 0.3067170391240315, + "step": 4790 + }, + { + "loss": 2.2818, + "grad_norm": 1.7862792015075684, + "learning_rate": 5e-05, + "epoch": 0.30703720304796056, + "step": 4795 + }, + { + "loss": 2.2911, + "grad_norm": 1.913051724433899, + "learning_rate": 5e-05, + "epoch": 0.3073573669718896, + "step": 4800 + }, + { + "eval_loss": 2.1567511558532715, + "eval_runtime": 9.5969, + "eval_samples_per_second": 213.403, + "eval_steps_per_second": 26.675, + "epoch": 0.3073573669718896, + "step": 4800 + }, + { + "loss": 2.3157, + "grad_norm": 1.9030219316482544, + "learning_rate": 5e-05, + "epoch": 0.3076775308958187, + "step": 4805 + }, + { + "loss": 2.2967, + "grad_norm": 1.981708288192749, + "learning_rate": 5e-05, + "epoch": 0.3079976948197477, + "step": 4810 + }, + { + "loss": 2.2553, + "grad_norm": 1.7999526262283325, + "learning_rate": 5e-05, + "epoch": 0.3083178587436768, + "step": 4815 + }, + { + "loss": 2.2993, + "grad_norm": 1.7089029550552368, + "learning_rate": 5e-05, + "epoch": 
0.3086380226676058, + "step": 4820 + }, + { + "loss": 2.2782, + "grad_norm": 1.7940775156021118, + "learning_rate": 5e-05, + "epoch": 0.30895818659153484, + "step": 4825 + }, + { + "loss": 2.2651, + "grad_norm": 1.896036148071289, + "learning_rate": 5e-05, + "epoch": 0.30927835051546393, + "step": 4830 + }, + { + "loss": 2.2781, + "grad_norm": 1.7181426286697388, + "learning_rate": 5e-05, + "epoch": 0.30959851443939296, + "step": 4835 + }, + { + "loss": 2.3079, + "grad_norm": 1.7568175792694092, + "learning_rate": 5e-05, + "epoch": 0.30991867836332204, + "step": 4840 + }, + { + "loss": 2.3256, + "grad_norm": 1.7117818593978882, + "learning_rate": 5e-05, + "epoch": 0.31023884228725107, + "step": 4845 + }, + { + "loss": 2.2652, + "grad_norm": 1.8535692691802979, + "learning_rate": 5e-05, + "epoch": 0.3105590062111801, + "step": 4850 + }, + { + "loss": 2.3082, + "grad_norm": 1.8518201112747192, + "learning_rate": 5e-05, + "epoch": 0.3108791701351092, + "step": 4855 + }, + { + "loss": 2.2893, + "grad_norm": 1.875934362411499, + "learning_rate": 5e-05, + "epoch": 0.3111993340590382, + "step": 4860 + }, + { + "loss": 2.2672, + "grad_norm": 1.743920087814331, + "learning_rate": 5e-05, + "epoch": 0.3115194979829673, + "step": 4865 + }, + { + "loss": 2.2895, + "grad_norm": 1.7549186944961548, + "learning_rate": 5e-05, + "epoch": 0.3118396619068963, + "step": 4870 + }, + { + "loss": 2.282, + "grad_norm": 1.7128772735595703, + "learning_rate": 5e-05, + "epoch": 0.31215982583082535, + "step": 4875 + }, + { + "loss": 2.3079, + "grad_norm": 1.7349681854248047, + "learning_rate": 5e-05, + "epoch": 0.31247998975475444, + "step": 4880 + }, + { + "loss": 2.2719, + "grad_norm": 1.8261305093765259, + "learning_rate": 5e-05, + "epoch": 0.31280015367868347, + "step": 4885 + }, + { + "loss": 2.2801, + "grad_norm": 1.7922636270523071, + "learning_rate": 5e-05, + "epoch": 0.31312031760261255, + "step": 4890 + }, + { + "loss": 2.2963, + "grad_norm": 1.8621406555175781, + "learning_rate": 5e-05, + "epoch": 0.3134404815265416, + "step": 4895 + }, + { + "loss": 2.2842, + "grad_norm": 1.7960196733474731, + "learning_rate": 5e-05, + "epoch": 0.31376064545047067, + "step": 4900 + }, + { + "loss": 2.2535, + "grad_norm": 1.7535030841827393, + "learning_rate": 5e-05, + "epoch": 0.3140808093743997, + "step": 4905 + }, + { + "loss": 2.2706, + "grad_norm": 1.7128777503967285, + "learning_rate": 5e-05, + "epoch": 0.3144009732983287, + "step": 4910 + }, + { + "loss": 2.2963, + "grad_norm": 1.7324950695037842, + "learning_rate": 5e-05, + "epoch": 0.3147211372222578, + "step": 4915 + }, + { + "loss": 2.2996, + "grad_norm": 1.759783387184143, + "learning_rate": 5e-05, + "epoch": 0.31504130114618684, + "step": 4920 + }, + { + "loss": 2.2934, + "grad_norm": 1.799742579460144, + "learning_rate": 5e-05, + "epoch": 0.3153614650701159, + "step": 4925 + }, + { + "loss": 2.2862, + "grad_norm": 1.726730465888977, + "learning_rate": 5e-05, + "epoch": 0.31568162899404495, + "step": 4930 + }, + { + "loss": 2.2912, + "grad_norm": 1.8253145217895508, + "learning_rate": 5e-05, + "epoch": 0.316001792917974, + "step": 4935 + }, + { + "loss": 2.3154, + "grad_norm": 1.7888239622116089, + "learning_rate": 5e-05, + "epoch": 0.31632195684190306, + "step": 4940 + }, + { + "loss": 2.291, + "grad_norm": 1.818763256072998, + "learning_rate": 5e-05, + "epoch": 0.3166421207658321, + "step": 4945 + }, + { + "loss": 2.2879, + "grad_norm": 1.679724097251892, + "learning_rate": 5e-05, + "epoch": 0.3169622846897612, + "step": 4950 + }, + { + "loss": 2.276, + 
"grad_norm": 1.7187193632125854, + "learning_rate": 5e-05, + "epoch": 0.3172824486136902, + "step": 4955 + }, + { + "loss": 2.3043, + "grad_norm": 1.8776874542236328, + "learning_rate": 5e-05, + "epoch": 0.3176026125376193, + "step": 4960 + }, + { + "loss": 2.299, + "grad_norm": 1.8387751579284668, + "learning_rate": 5e-05, + "epoch": 0.3179227764615483, + "step": 4965 + }, + { + "loss": 2.2942, + "grad_norm": 1.8348480463027954, + "learning_rate": 5e-05, + "epoch": 0.31824294038547735, + "step": 4970 + }, + { + "loss": 2.2789, + "grad_norm": 1.76790189743042, + "learning_rate": 5e-05, + "epoch": 0.31856310430940643, + "step": 4975 + }, + { + "loss": 2.2821, + "grad_norm": 1.7413114309310913, + "learning_rate": 5e-05, + "epoch": 0.31888326823333546, + "step": 4980 + }, + { + "loss": 2.2802, + "grad_norm": 1.720826268196106, + "learning_rate": 5e-05, + "epoch": 0.31920343215726454, + "step": 4985 + }, + { + "loss": 2.3128, + "grad_norm": 1.6995984315872192, + "learning_rate": 5e-05, + "epoch": 0.3195235960811936, + "step": 4990 + }, + { + "loss": 2.3075, + "grad_norm": 1.8378366231918335, + "learning_rate": 5e-05, + "epoch": 0.3198437600051226, + "step": 4995 + }, + { + "loss": 2.3002, + "grad_norm": 1.812118411064148, + "learning_rate": 5e-05, + "epoch": 0.3201639239290517, + "step": 5000 + }, + { + "eval_loss": 2.1544718742370605, + "eval_runtime": 9.2883, + "eval_samples_per_second": 220.493, + "eval_steps_per_second": 27.562, + "epoch": 0.3201639239290517, + "step": 5000 + }, + { + "loss": 2.2662, + "grad_norm": 1.87315833568573, + "learning_rate": 5e-05, + "epoch": 0.3204840878529807, + "step": 5005 + }, + { + "loss": 2.3016, + "grad_norm": 1.7631300687789917, + "learning_rate": 5e-05, + "epoch": 0.3208042517769098, + "step": 5010 + }, + { + "loss": 2.3032, + "grad_norm": 1.8889778852462769, + "learning_rate": 5e-05, + "epoch": 0.32112441570083883, + "step": 5015 + }, + { + "loss": 2.3028, + "grad_norm": 1.7224068641662598, + "learning_rate": 5e-05, + "epoch": 0.32144457962476786, + "step": 5020 + }, + { + "loss": 2.2573, + "grad_norm": 1.7411279678344727, + "learning_rate": 5e-05, + "epoch": 0.32176474354869694, + "step": 5025 + }, + { + "loss": 2.2797, + "grad_norm": 2.0215229988098145, + "learning_rate": 5e-05, + "epoch": 0.32208490747262597, + "step": 5030 + }, + { + "loss": 2.2683, + "grad_norm": 1.7024788856506348, + "learning_rate": 5e-05, + "epoch": 0.32240507139655505, + "step": 5035 + }, + { + "loss": 2.2709, + "grad_norm": 1.871773600578308, + "learning_rate": 5e-05, + "epoch": 0.3227252353204841, + "step": 5040 + }, + { + "loss": 2.28, + "grad_norm": 1.672973394393921, + "learning_rate": 5e-05, + "epoch": 0.32304539924441317, + "step": 5045 + }, + { + "loss": 2.2829, + "grad_norm": 1.955171823501587, + "learning_rate": 5e-05, + "epoch": 0.3233655631683422, + "step": 5050 + }, + { + "loss": 2.2957, + "grad_norm": 1.820365071296692, + "learning_rate": 5e-05, + "epoch": 0.3236857270922712, + "step": 5055 + }, + { + "loss": 2.2904, + "grad_norm": 1.8486545085906982, + "learning_rate": 5e-05, + "epoch": 0.3240058910162003, + "step": 5060 + }, + { + "loss": 2.297, + "grad_norm": 1.7132619619369507, + "learning_rate": 5e-05, + "epoch": 0.32432605494012934, + "step": 5065 + }, + { + "loss": 2.2782, + "grad_norm": 1.8040876388549805, + "learning_rate": 5e-05, + "epoch": 0.3246462188640584, + "step": 5070 + }, + { + "loss": 2.2734, + "grad_norm": 1.7798943519592285, + "learning_rate": 5e-05, + "epoch": 0.32496638278798745, + "step": 5075 + }, + { + "loss": 2.2636, + "grad_norm": 
1.7690693140029907, + "learning_rate": 5e-05, + "epoch": 0.3252865467119165, + "step": 5080 + }, + { + "loss": 2.2689, + "grad_norm": 1.793588638305664, + "learning_rate": 5e-05, + "epoch": 0.32560671063584556, + "step": 5085 + }, + { + "loss": 2.3057, + "grad_norm": 1.7516108751296997, + "learning_rate": 5e-05, + "epoch": 0.3259268745597746, + "step": 5090 + }, + { + "loss": 2.2766, + "grad_norm": 1.7585774660110474, + "learning_rate": 5e-05, + "epoch": 0.3262470384837037, + "step": 5095 + }, + { + "loss": 2.3102, + "grad_norm": 1.7661858797073364, + "learning_rate": 5e-05, + "epoch": 0.3265672024076327, + "step": 5100 + }, + { + "loss": 2.3072, + "grad_norm": 1.7506427764892578, + "learning_rate": 5e-05, + "epoch": 0.32688736633156174, + "step": 5105 + }, + { + "loss": 2.2912, + "grad_norm": 1.7459840774536133, + "learning_rate": 5e-05, + "epoch": 0.3272075302554908, + "step": 5110 + }, + { + "loss": 2.2875, + "grad_norm": 1.7619469165802002, + "learning_rate": 5e-05, + "epoch": 0.32752769417941985, + "step": 5115 + }, + { + "loss": 2.275, + "grad_norm": 1.7539411783218384, + "learning_rate": 5e-05, + "epoch": 0.32784785810334893, + "step": 5120 + }, + { + "loss": 2.2907, + "grad_norm": 1.726323127746582, + "learning_rate": 5e-05, + "epoch": 0.32816802202727796, + "step": 5125 + }, + { + "loss": 2.2491, + "grad_norm": 1.6898913383483887, + "learning_rate": 5e-05, + "epoch": 0.32848818595120705, + "step": 5130 + }, + { + "loss": 2.3008, + "grad_norm": 1.7721654176712036, + "learning_rate": 5e-05, + "epoch": 0.3288083498751361, + "step": 5135 + }, + { + "loss": 2.2871, + "grad_norm": 1.859742522239685, + "learning_rate": 5e-05, + "epoch": 0.3291285137990651, + "step": 5140 + }, + { + "loss": 2.2713, + "grad_norm": 1.8325496912002563, + "learning_rate": 5e-05, + "epoch": 0.3294486777229942, + "step": 5145 + }, + { + "loss": 2.2931, + "grad_norm": 1.845747709274292, + "learning_rate": 5e-05, + "epoch": 0.3297688416469232, + "step": 5150 + }, + { + "loss": 2.2746, + "grad_norm": 1.8495094776153564, + "learning_rate": 5e-05, + "epoch": 0.3300890055708523, + "step": 5155 + }, + { + "loss": 2.2889, + "grad_norm": 1.7559887170791626, + "learning_rate": 5e-05, + "epoch": 0.33040916949478133, + "step": 5160 + }, + { + "loss": 2.3083, + "grad_norm": 1.7825977802276611, + "learning_rate": 5e-05, + "epoch": 0.33072933341871036, + "step": 5165 + }, + { + "loss": 2.2705, + "grad_norm": 1.788145899772644, + "learning_rate": 5e-05, + "epoch": 0.33104949734263944, + "step": 5170 + }, + { + "loss": 2.2735, + "grad_norm": 1.755177617073059, + "learning_rate": 5e-05, + "epoch": 0.33136966126656847, + "step": 5175 + }, + { + "loss": 2.2677, + "grad_norm": 1.7675113677978516, + "learning_rate": 5e-05, + "epoch": 0.33168982519049756, + "step": 5180 + }, + { + "loss": 2.3125, + "grad_norm": 1.7566906213760376, + "learning_rate": 5e-05, + "epoch": 0.3320099891144266, + "step": 5185 + }, + { + "loss": 2.2741, + "grad_norm": 1.8251054286956787, + "learning_rate": 5e-05, + "epoch": 0.3323301530383556, + "step": 5190 + }, + { + "loss": 2.255, + "grad_norm": 1.815388560295105, + "learning_rate": 5e-05, + "epoch": 0.3326503169622847, + "step": 5195 + }, + { + "loss": 2.3014, + "grad_norm": 1.8509902954101562, + "learning_rate": 5e-05, + "epoch": 0.3329704808862137, + "step": 5200 + }, + { + "eval_loss": 2.1533737182617188, + "eval_runtime": 13.3544, + "eval_samples_per_second": 153.358, + "eval_steps_per_second": 19.17, + "epoch": 0.3329704808862137, + "step": 5200 + }, + { + "loss": 2.2773, + "grad_norm": 
1.8529142141342163, + "learning_rate": 5e-05, + "epoch": 0.3332906448101428, + "step": 5205 + }, + { + "loss": 2.2882, + "grad_norm": 1.8580735921859741, + "learning_rate": 5e-05, + "epoch": 0.33361080873407184, + "step": 5210 + }, + { + "loss": 2.2652, + "grad_norm": 1.8027414083480835, + "learning_rate": 5e-05, + "epoch": 0.3339309726580009, + "step": 5215 + }, + { + "loss": 2.2714, + "grad_norm": 1.7679603099822998, + "learning_rate": 5e-05, + "epoch": 0.33425113658192995, + "step": 5220 + }, + { + "loss": 2.2793, + "grad_norm": 1.730897068977356, + "learning_rate": 5e-05, + "epoch": 0.334571300505859, + "step": 5225 + }, + { + "loss": 2.275, + "grad_norm": 1.7817909717559814, + "learning_rate": 5e-05, + "epoch": 0.33489146442978807, + "step": 5230 + }, + { + "loss": 2.27, + "grad_norm": 1.763421893119812, + "learning_rate": 5e-05, + "epoch": 0.3352116283537171, + "step": 5235 + }, + { + "loss": 2.2856, + "grad_norm": 1.7344940900802612, + "learning_rate": 5e-05, + "epoch": 0.3355317922776462, + "step": 5240 + }, + { + "loss": 2.2903, + "grad_norm": 1.8662790060043335, + "learning_rate": 5e-05, + "epoch": 0.3358519562015752, + "step": 5245 + }, + { + "loss": 2.2768, + "grad_norm": 1.7917147874832153, + "learning_rate": 5e-05, + "epoch": 0.33617212012550424, + "step": 5250 + }, + { + "loss": 2.2903, + "grad_norm": 1.734060525894165, + "learning_rate": 5e-05, + "epoch": 0.3364922840494333, + "step": 5255 + }, + { + "loss": 2.268, + "grad_norm": 1.7482142448425293, + "learning_rate": 5e-05, + "epoch": 0.33681244797336235, + "step": 5260 + }, + { + "loss": 2.2548, + "grad_norm": 1.819955825805664, + "learning_rate": 5e-05, + "epoch": 0.33713261189729143, + "step": 5265 + }, + { + "loss": 2.2723, + "grad_norm": 1.7646571397781372, + "learning_rate": 5e-05, + "epoch": 0.33745277582122046, + "step": 5270 + }, + { + "loss": 2.2785, + "grad_norm": 1.7525885105133057, + "learning_rate": 5e-05, + "epoch": 0.3377729397451495, + "step": 5275 + }, + { + "loss": 2.2877, + "grad_norm": 1.8543511629104614, + "learning_rate": 5e-05, + "epoch": 0.3380931036690786, + "step": 5280 + }, + { + "loss": 2.2604, + "grad_norm": 1.848857045173645, + "learning_rate": 5e-05, + "epoch": 0.3384132675930076, + "step": 5285 + }, + { + "loss": 2.2763, + "grad_norm": 1.8210421800613403, + "learning_rate": 5e-05, + "epoch": 0.3387334315169367, + "step": 5290 + }, + { + "loss": 2.2962, + "grad_norm": 1.717044472694397, + "learning_rate": 5e-05, + "epoch": 0.3390535954408657, + "step": 5295 + }, + { + "loss": 2.2777, + "grad_norm": 1.7110569477081299, + "learning_rate": 5e-05, + "epoch": 0.3393737593647948, + "step": 5300 + }, + { + "loss": 2.2395, + "grad_norm": 1.7508400678634644, + "learning_rate": 5e-05, + "epoch": 0.33969392328872383, + "step": 5305 + }, + { + "loss": 2.2731, + "grad_norm": 1.8925203084945679, + "learning_rate": 5e-05, + "epoch": 0.34001408721265286, + "step": 5310 + }, + { + "loss": 2.3024, + "grad_norm": 1.7870714664459229, + "learning_rate": 5e-05, + "epoch": 0.34033425113658194, + "step": 5315 + }, + { + "loss": 2.2829, + "grad_norm": 1.744795799255371, + "learning_rate": 5e-05, + "epoch": 0.340654415060511, + "step": 5320 + }, + { + "loss": 2.2815, + "grad_norm": 1.7675684690475464, + "learning_rate": 5e-05, + "epoch": 0.34097457898444006, + "step": 5325 + }, + { + "loss": 2.2803, + "grad_norm": 1.8785274028778076, + "learning_rate": 5e-05, + "epoch": 0.3412947429083691, + "step": 5330 + }, + { + "loss": 2.2804, + "grad_norm": 1.818994402885437, + "learning_rate": 5e-05, + "epoch": 
0.3416149068322981, + "step": 5335 + }, + { + "loss": 2.3124, + "grad_norm": 1.9585684537887573, + "learning_rate": 5e-05, + "epoch": 0.3419350707562272, + "step": 5340 + }, + { + "loss": 2.2672, + "grad_norm": 1.770952820777893, + "learning_rate": 5e-05, + "epoch": 0.34225523468015623, + "step": 5345 + }, + { + "loss": 2.2677, + "grad_norm": 1.6577550172805786, + "learning_rate": 5e-05, + "epoch": 0.3425753986040853, + "step": 5350 + }, + { + "loss": 2.2676, + "grad_norm": 1.9661815166473389, + "learning_rate": 5e-05, + "epoch": 0.34289556252801434, + "step": 5355 + }, + { + "loss": 2.2868, + "grad_norm": 1.7929357290267944, + "learning_rate": 5e-05, + "epoch": 0.34321572645194337, + "step": 5360 + }, + { + "loss": 2.2842, + "grad_norm": 1.8188828229904175, + "learning_rate": 5e-05, + "epoch": 0.34353589037587245, + "step": 5365 + }, + { + "loss": 2.2513, + "grad_norm": 1.781710147857666, + "learning_rate": 5e-05, + "epoch": 0.3438560542998015, + "step": 5370 + }, + { + "loss": 2.2664, + "grad_norm": 1.8323997259140015, + "learning_rate": 5e-05, + "epoch": 0.34417621822373057, + "step": 5375 + }, + { + "loss": 2.2698, + "grad_norm": 1.8200249671936035, + "learning_rate": 5e-05, + "epoch": 0.3444963821476596, + "step": 5380 + }, + { + "loss": 2.2822, + "grad_norm": 1.749212622642517, + "learning_rate": 5e-05, + "epoch": 0.3448165460715887, + "step": 5385 + }, + { + "loss": 2.2588, + "grad_norm": 2.007263422012329, + "learning_rate": 5e-05, + "epoch": 0.3451367099955177, + "step": 5390 + }, + { + "loss": 2.2757, + "grad_norm": 1.8585344552993774, + "learning_rate": 5e-05, + "epoch": 0.34545687391944674, + "step": 5395 + }, + { + "loss": 2.2768, + "grad_norm": 1.923609733581543, + "learning_rate": 5e-05, + "epoch": 0.3457770378433758, + "step": 5400 + }, + { + "eval_loss": 2.1260647773742676, + "eval_runtime": 9.2705, + "eval_samples_per_second": 220.916, + "eval_steps_per_second": 27.615, + "epoch": 0.3457770378433758, + "step": 5400 + }, + { + "loss": 2.291, + "grad_norm": 1.8628069162368774, + "learning_rate": 5e-05, + "epoch": 0.34609720176730485, + "step": 5405 + }, + { + "loss": 2.276, + "grad_norm": 1.712012529373169, + "learning_rate": 5e-05, + "epoch": 0.34641736569123394, + "step": 5410 + }, + { + "loss": 2.2774, + "grad_norm": 1.8341697454452515, + "learning_rate": 5e-05, + "epoch": 0.34673752961516296, + "step": 5415 + }, + { + "loss": 2.2809, + "grad_norm": 1.7650182247161865, + "learning_rate": 5e-05, + "epoch": 0.347057693539092, + "step": 5420 + }, + { + "loss": 2.2893, + "grad_norm": 1.7278627157211304, + "learning_rate": 5e-05, + "epoch": 0.3473778574630211, + "step": 5425 + }, + { + "loss": 2.2507, + "grad_norm": 1.6427825689315796, + "learning_rate": 5e-05, + "epoch": 0.3476980213869501, + "step": 5430 + }, + { + "loss": 2.2821, + "grad_norm": 1.7547065019607544, + "learning_rate": 5e-05, + "epoch": 0.3480181853108792, + "step": 5435 + }, + { + "loss": 2.2893, + "grad_norm": 1.7182886600494385, + "learning_rate": 5e-05, + "epoch": 0.3483383492348082, + "step": 5440 + }, + { + "loss": 2.2876, + "grad_norm": 1.84799063205719, + "learning_rate": 5e-05, + "epoch": 0.34865851315873725, + "step": 5445 + }, + { + "loss": 2.2712, + "grad_norm": 1.6814558506011963, + "learning_rate": 5e-05, + "epoch": 0.34897867708266633, + "step": 5450 + }, + { + "loss": 2.29, + "grad_norm": 1.881182074546814, + "learning_rate": 5e-05, + "epoch": 0.34929884100659536, + "step": 5455 + }, + { + "loss": 2.2206, + "grad_norm": 1.8951886892318726, + "learning_rate": 5e-05, + "epoch": 
0.34961900493052445, + "step": 5460 + }, + { + "loss": 2.2433, + "grad_norm": 1.8026858568191528, + "learning_rate": 5e-05, + "epoch": 0.3499391688544535, + "step": 5465 + }, + { + "loss": 2.2556, + "grad_norm": 1.7248677015304565, + "learning_rate": 5e-05, + "epoch": 0.35025933277838256, + "step": 5470 + }, + { + "loss": 2.2637, + "grad_norm": 1.7217531204223633, + "learning_rate": 5e-05, + "epoch": 0.3505794967023116, + "step": 5475 + }, + { + "loss": 2.2854, + "grad_norm": 1.7552876472473145, + "learning_rate": 5e-05, + "epoch": 0.3508996606262406, + "step": 5480 + }, + { + "loss": 2.262, + "grad_norm": 1.7518340349197388, + "learning_rate": 5e-05, + "epoch": 0.3512198245501697, + "step": 5485 + }, + { + "loss": 2.2613, + "grad_norm": 1.8185194730758667, + "learning_rate": 5e-05, + "epoch": 0.35153998847409873, + "step": 5490 + }, + { + "loss": 2.2867, + "grad_norm": 1.7345361709594727, + "learning_rate": 5e-05, + "epoch": 0.3518601523980278, + "step": 5495 + }, + { + "loss": 2.2498, + "grad_norm": 1.7367279529571533, + "learning_rate": 5e-05, + "epoch": 0.35218031632195684, + "step": 5500 + }, + { + "loss": 2.3032, + "grad_norm": 1.7460354566574097, + "learning_rate": 5e-05, + "epoch": 0.35250048024588587, + "step": 5505 + }, + { + "loss": 2.279, + "grad_norm": 1.6836531162261963, + "learning_rate": 5e-05, + "epoch": 0.35282064416981496, + "step": 5510 + }, + { + "loss": 2.2791, + "grad_norm": 1.7619463205337524, + "learning_rate": 5e-05, + "epoch": 0.353140808093744, + "step": 5515 + }, + { + "loss": 2.2974, + "grad_norm": 1.8177152872085571, + "learning_rate": 5e-05, + "epoch": 0.35346097201767307, + "step": 5520 + }, + { + "loss": 2.3094, + "grad_norm": 1.7507604360580444, + "learning_rate": 5e-05, + "epoch": 0.3537811359416021, + "step": 5525 + }, + { + "loss": 2.2745, + "grad_norm": 1.7359153032302856, + "learning_rate": 5e-05, + "epoch": 0.3541012998655311, + "step": 5530 + }, + { + "loss": 2.2713, + "grad_norm": 1.7324638366699219, + "learning_rate": 5e-05, + "epoch": 0.3544214637894602, + "step": 5535 + }, + { + "loss": 2.3063, + "grad_norm": 1.7245142459869385, + "learning_rate": 5e-05, + "epoch": 0.35474162771338924, + "step": 5540 + }, + { + "loss": 2.2258, + "grad_norm": 1.699273943901062, + "learning_rate": 5e-05, + "epoch": 0.3550617916373183, + "step": 5545 + }, + { + "loss": 2.2459, + "grad_norm": 1.653936505317688, + "learning_rate": 5e-05, + "epoch": 0.35538195556124735, + "step": 5550 + }, + { + "loss": 2.2984, + "grad_norm": 1.7689787149429321, + "learning_rate": 5e-05, + "epoch": 0.35570211948517644, + "step": 5555 + }, + { + "loss": 2.2585, + "grad_norm": 1.693535327911377, + "learning_rate": 5e-05, + "epoch": 0.35602228340910547, + "step": 5560 + }, + { + "loss": 2.2832, + "grad_norm": 1.801584243774414, + "learning_rate": 5e-05, + "epoch": 0.3563424473330345, + "step": 5565 + }, + { + "loss": 2.3029, + "grad_norm": 1.8008770942687988, + "learning_rate": 5e-05, + "epoch": 0.3566626112569636, + "step": 5570 + }, + { + "loss": 2.3078, + "grad_norm": 1.7314320802688599, + "learning_rate": 5e-05, + "epoch": 0.3569827751808926, + "step": 5575 + }, + { + "loss": 2.2484, + "grad_norm": 1.7883455753326416, + "learning_rate": 5e-05, + "epoch": 0.3573029391048217, + "step": 5580 + }, + { + "loss": 2.2453, + "grad_norm": 1.7167059183120728, + "learning_rate": 5e-05, + "epoch": 0.3576231030287507, + "step": 5585 + }, + { + "loss": 2.2577, + "grad_norm": 1.7459754943847656, + "learning_rate": 5e-05, + "epoch": 0.35794326695267975, + "step": 5590 + }, + { + "loss": 2.2555, + 
"grad_norm": 1.783430576324463, + "learning_rate": 5e-05, + "epoch": 0.35826343087660883, + "step": 5595 + }, + { + "loss": 2.2625, + "grad_norm": 1.8094017505645752, + "learning_rate": 5e-05, + "epoch": 0.35858359480053786, + "step": 5600 + }, + { + "eval_loss": 2.1376986503601074, + "eval_runtime": 9.2886, + "eval_samples_per_second": 220.484, + "eval_steps_per_second": 27.561, + "epoch": 0.35858359480053786, + "step": 5600 + }, + { + "loss": 2.2669, + "grad_norm": 1.796750783920288, + "learning_rate": 5e-05, + "epoch": 0.35890375872446695, + "step": 5605 + }, + { + "loss": 2.2834, + "grad_norm": 1.7852609157562256, + "learning_rate": 5e-05, + "epoch": 0.359223922648396, + "step": 5610 + }, + { + "loss": 2.2566, + "grad_norm": 1.7483196258544922, + "learning_rate": 5e-05, + "epoch": 0.359544086572325, + "step": 5615 + }, + { + "loss": 2.2819, + "grad_norm": 1.7469184398651123, + "learning_rate": 5e-05, + "epoch": 0.3598642504962541, + "step": 5620 + }, + { + "loss": 2.253, + "grad_norm": 1.787428855895996, + "learning_rate": 5e-05, + "epoch": 0.3601844144201831, + "step": 5625 + }, + { + "loss": 2.2558, + "grad_norm": 1.7420175075531006, + "learning_rate": 5e-05, + "epoch": 0.3605045783441122, + "step": 5630 + }, + { + "loss": 2.2465, + "grad_norm": 1.676102638244629, + "learning_rate": 5e-05, + "epoch": 0.36082474226804123, + "step": 5635 + }, + { + "loss": 2.2594, + "grad_norm": 1.754003882408142, + "learning_rate": 5e-05, + "epoch": 0.3611449061919703, + "step": 5640 + }, + { + "loss": 2.2558, + "grad_norm": 1.7780991792678833, + "learning_rate": 5e-05, + "epoch": 0.36146507011589935, + "step": 5645 + }, + { + "loss": 2.2613, + "grad_norm": 1.7494131326675415, + "learning_rate": 5e-05, + "epoch": 0.3617852340398284, + "step": 5650 + }, + { + "loss": 2.2511, + "grad_norm": 1.8119771480560303, + "learning_rate": 5e-05, + "epoch": 0.36210539796375746, + "step": 5655 + }, + { + "loss": 2.2891, + "grad_norm": 1.8024489879608154, + "learning_rate": 5e-05, + "epoch": 0.3624255618876865, + "step": 5660 + }, + { + "loss": 2.2737, + "grad_norm": 1.7026606798171997, + "learning_rate": 5e-05, + "epoch": 0.36274572581161557, + "step": 5665 + }, + { + "loss": 2.3008, + "grad_norm": 1.7064659595489502, + "learning_rate": 5e-05, + "epoch": 0.3630658897355446, + "step": 5670 + }, + { + "loss": 2.2796, + "grad_norm": 1.7445411682128906, + "learning_rate": 5e-05, + "epoch": 0.36338605365947363, + "step": 5675 + }, + { + "loss": 2.2406, + "grad_norm": 1.7404433488845825, + "learning_rate": 5e-05, + "epoch": 0.3637062175834027, + "step": 5680 + }, + { + "loss": 2.2742, + "grad_norm": 1.6843476295471191, + "learning_rate": 5e-05, + "epoch": 0.36402638150733174, + "step": 5685 + }, + { + "loss": 2.2763, + "grad_norm": 1.8461291790008545, + "learning_rate": 5e-05, + "epoch": 0.3643465454312608, + "step": 5690 + }, + { + "loss": 2.2594, + "grad_norm": 1.7500439882278442, + "learning_rate": 5e-05, + "epoch": 0.36466670935518986, + "step": 5695 + }, + { + "loss": 2.2495, + "grad_norm": 1.7546688318252563, + "learning_rate": 5e-05, + "epoch": 0.3649868732791189, + "step": 5700 + }, + { + "loss": 2.268, + "grad_norm": 1.7128827571868896, + "learning_rate": 5e-05, + "epoch": 0.36530703720304797, + "step": 5705 + }, + { + "loss": 2.2716, + "grad_norm": 1.8002029657363892, + "learning_rate": 5e-05, + "epoch": 0.365627201126977, + "step": 5710 + }, + { + "loss": 2.2689, + "grad_norm": 1.7871887683868408, + "learning_rate": 5e-05, + "epoch": 0.3659473650509061, + "step": 5715 + }, + { + "loss": 2.2444, + "grad_norm": 
1.801291584968567, + "learning_rate": 5e-05, + "epoch": 0.3662675289748351, + "step": 5720 + }, + { + "loss": 2.2788, + "grad_norm": 1.8185909986495972, + "learning_rate": 5e-05, + "epoch": 0.3665876928987642, + "step": 5725 + }, + { + "loss": 2.2746, + "grad_norm": 1.7295774221420288, + "learning_rate": 5e-05, + "epoch": 0.3669078568226932, + "step": 5730 + }, + { + "loss": 2.2953, + "grad_norm": 1.7250750064849854, + "learning_rate": 5e-05, + "epoch": 0.36722802074662225, + "step": 5735 + }, + { + "loss": 2.2689, + "grad_norm": 1.7358938455581665, + "learning_rate": 5e-05, + "epoch": 0.36754818467055134, + "step": 5740 + }, + { + "loss": 2.2594, + "grad_norm": 1.7297829389572144, + "learning_rate": 5e-05, + "epoch": 0.36786834859448037, + "step": 5745 + }, + { + "loss": 2.2729, + "grad_norm": 1.788424015045166, + "learning_rate": 5e-05, + "epoch": 0.36818851251840945, + "step": 5750 + }, + { + "loss": 2.2442, + "grad_norm": 1.873340368270874, + "learning_rate": 5e-05, + "epoch": 0.3685086764423385, + "step": 5755 + }, + { + "loss": 2.249, + "grad_norm": 1.7489144802093506, + "learning_rate": 5e-05, + "epoch": 0.3688288403662675, + "step": 5760 + }, + { + "loss": 2.2845, + "grad_norm": 1.7094459533691406, + "learning_rate": 5e-05, + "epoch": 0.3691490042901966, + "step": 5765 + }, + { + "loss": 2.2626, + "grad_norm": 1.7959952354431152, + "learning_rate": 5e-05, + "epoch": 0.3694691682141256, + "step": 5770 + }, + { + "loss": 2.261, + "grad_norm": 1.7271146774291992, + "learning_rate": 5e-05, + "epoch": 0.3697893321380547, + "step": 5775 + }, + { + "loss": 2.2694, + "grad_norm": 1.7205613851547241, + "learning_rate": 5e-05, + "epoch": 0.37010949606198373, + "step": 5780 + }, + { + "loss": 2.2703, + "grad_norm": 1.6520004272460938, + "learning_rate": 5e-05, + "epoch": 0.37042965998591276, + "step": 5785 + }, + { + "loss": 2.2531, + "grad_norm": 1.6878688335418701, + "learning_rate": 5e-05, + "epoch": 0.37074982390984185, + "step": 5790 + }, + { + "loss": 2.2859, + "grad_norm": 1.7531139850616455, + "learning_rate": 5e-05, + "epoch": 0.3710699878337709, + "step": 5795 + }, + { + "loss": 2.2609, + "grad_norm": 1.7352375984191895, + "learning_rate": 5e-05, + "epoch": 0.37139015175769996, + "step": 5800 + }, + { + "eval_loss": 2.1418533325195312, + "eval_runtime": 12.9494, + "eval_samples_per_second": 158.154, + "eval_steps_per_second": 19.769, + "epoch": 0.37139015175769996, + "step": 5800 + }, + { + "loss": 2.278, + "grad_norm": 1.7465990781784058, + "learning_rate": 5e-05, + "epoch": 0.371710315681629, + "step": 5805 + }, + { + "loss": 2.2576, + "grad_norm": 1.6427454948425293, + "learning_rate": 5e-05, + "epoch": 0.3720304796055581, + "step": 5810 + }, + { + "loss": 2.2879, + "grad_norm": 1.6827012300491333, + "learning_rate": 5e-05, + "epoch": 0.3723506435294871, + "step": 5815 + }, + { + "loss": 2.2817, + "grad_norm": 1.6802785396575928, + "learning_rate": 5e-05, + "epoch": 0.37267080745341613, + "step": 5820 + }, + { + "loss": 2.2624, + "grad_norm": 1.764146089553833, + "learning_rate": 5e-05, + "epoch": 0.3729909713773452, + "step": 5825 + }, + { + "loss": 2.2748, + "grad_norm": 1.7563925981521606, + "learning_rate": 5e-05, + "epoch": 0.37331113530127424, + "step": 5830 + }, + { + "loss": 2.2679, + "grad_norm": 1.7826206684112549, + "learning_rate": 5e-05, + "epoch": 0.37363129922520333, + "step": 5835 + }, + { + "loss": 2.2631, + "grad_norm": 1.7394565343856812, + "learning_rate": 5e-05, + "epoch": 0.37395146314913236, + "step": 5840 + }, + { + "loss": 2.2668, + "grad_norm": 
1.702976942062378, + "learning_rate": 5e-05, + "epoch": 0.3742716270730614, + "step": 5845 + }, + { + "loss": 2.2615, + "grad_norm": 1.8101780414581299, + "learning_rate": 5e-05, + "epoch": 0.37459179099699047, + "step": 5850 + }, + { + "loss": 2.2914, + "grad_norm": 1.7696033716201782, + "learning_rate": 5e-05, + "epoch": 0.3749119549209195, + "step": 5855 + }, + { + "loss": 2.2536, + "grad_norm": 1.7243146896362305, + "learning_rate": 5e-05, + "epoch": 0.3752321188448486, + "step": 5860 + }, + { + "loss": 2.2839, + "grad_norm": 1.707695722579956, + "learning_rate": 5e-05, + "epoch": 0.3755522827687776, + "step": 5865 + }, + { + "loss": 2.2604, + "grad_norm": 1.650211215019226, + "learning_rate": 5e-05, + "epoch": 0.37587244669270664, + "step": 5870 + }, + { + "loss": 2.258, + "grad_norm": 1.6194339990615845, + "learning_rate": 5e-05, + "epoch": 0.3761926106166357, + "step": 5875 + }, + { + "loss": 2.2542, + "grad_norm": 1.7091882228851318, + "learning_rate": 5e-05, + "epoch": 0.37651277454056475, + "step": 5880 + }, + { + "loss": 2.2661, + "grad_norm": 1.733975887298584, + "learning_rate": 5e-05, + "epoch": 0.37683293846449384, + "step": 5885 + }, + { + "loss": 2.2536, + "grad_norm": 1.7769482135772705, + "learning_rate": 5e-05, + "epoch": 0.37715310238842287, + "step": 5890 + }, + { + "loss": 2.2515, + "grad_norm": 1.7663599252700806, + "learning_rate": 5e-05, + "epoch": 0.37747326631235195, + "step": 5895 + }, + { + "loss": 2.2979, + "grad_norm": 1.6576106548309326, + "learning_rate": 5e-05, + "epoch": 0.377793430236281, + "step": 5900 + }, + { + "loss": 2.2524, + "grad_norm": 1.838011384010315, + "learning_rate": 5e-05, + "epoch": 0.37811359416021, + "step": 5905 + }, + { + "loss": 2.2539, + "grad_norm": 1.7713699340820312, + "learning_rate": 5e-05, + "epoch": 0.3784337580841391, + "step": 5910 + }, + { + "loss": 2.2644, + "grad_norm": 1.765184760093689, + "learning_rate": 5e-05, + "epoch": 0.3787539220080681, + "step": 5915 + }, + { + "loss": 2.2128, + "grad_norm": 1.70463228225708, + "learning_rate": 5e-05, + "epoch": 0.3790740859319972, + "step": 5920 + }, + { + "loss": 2.2651, + "grad_norm": 1.689228892326355, + "learning_rate": 5e-05, + "epoch": 0.37939424985592624, + "step": 5925 + }, + { + "loss": 2.2368, + "grad_norm": 1.7535511255264282, + "learning_rate": 5e-05, + "epoch": 0.37971441377985526, + "step": 5930 + }, + { + "loss": 2.2507, + "grad_norm": 1.768235445022583, + "learning_rate": 5e-05, + "epoch": 0.38003457770378435, + "step": 5935 + }, + { + "loss": 2.2313, + "grad_norm": 1.7316277027130127, + "learning_rate": 5e-05, + "epoch": 0.3803547416277134, + "step": 5940 + }, + { + "loss": 2.2901, + "grad_norm": 1.7372463941574097, + "learning_rate": 5e-05, + "epoch": 0.38067490555164246, + "step": 5945 + }, + { + "loss": 2.2767, + "grad_norm": 1.8195472955703735, + "learning_rate": 5e-05, + "epoch": 0.3809950694755715, + "step": 5950 + }, + { + "loss": 2.2819, + "grad_norm": 1.7599300146102905, + "learning_rate": 5e-05, + "epoch": 0.3813152333995005, + "step": 5955 + }, + { + "loss": 2.2495, + "grad_norm": 1.763772964477539, + "learning_rate": 5e-05, + "epoch": 0.3816353973234296, + "step": 5960 + }, + { + "loss": 2.2689, + "grad_norm": 1.6997100114822388, + "learning_rate": 5e-05, + "epoch": 0.38195556124735863, + "step": 5965 + }, + { + "loss": 2.2784, + "grad_norm": 1.6891993284225464, + "learning_rate": 5e-05, + "epoch": 0.3822757251712877, + "step": 5970 + }, + { + "loss": 2.289, + "grad_norm": 1.7187187671661377, + "learning_rate": 5e-05, + "epoch": 
0.38259588909521675, + "step": 5975 + }, + { + "loss": 2.2425, + "grad_norm": 1.6827722787857056, + "learning_rate": 5e-05, + "epoch": 0.38291605301914583, + "step": 5980 + }, + { + "loss": 2.2499, + "grad_norm": 1.6614289283752441, + "learning_rate": 5e-05, + "epoch": 0.38323621694307486, + "step": 5985 + }, + { + "loss": 2.2559, + "grad_norm": 1.7182048559188843, + "learning_rate": 5e-05, + "epoch": 0.3835563808670039, + "step": 5990 + }, + { + "loss": 2.2781, + "grad_norm": 1.7572559118270874, + "learning_rate": 5e-05, + "epoch": 0.38387654479093297, + "step": 5995 + }, + { + "loss": 2.26, + "grad_norm": 1.8257755041122437, + "learning_rate": 5e-05, + "epoch": 0.384196708714862, + "step": 6000 + }, + { + "eval_loss": 2.11156964302063, + "eval_runtime": 9.0951, + "eval_samples_per_second": 225.177, + "eval_steps_per_second": 28.147, + "epoch": 0.384196708714862, + "step": 6000 + }, + { + "loss": 2.235, + "grad_norm": 1.7780883312225342, + "learning_rate": 5e-05, + "epoch": 0.3845168726387911, + "step": 6005 + }, + { + "loss": 2.2434, + "grad_norm": 1.7068132162094116, + "learning_rate": 5e-05, + "epoch": 0.3848370365627201, + "step": 6010 + }, + { + "loss": 2.2748, + "grad_norm": 1.6712257862091064, + "learning_rate": 5e-05, + "epoch": 0.38515720048664914, + "step": 6015 + }, + { + "loss": 2.2457, + "grad_norm": 1.7343010902404785, + "learning_rate": 5e-05, + "epoch": 0.3854773644105782, + "step": 6020 + }, + { + "loss": 2.2694, + "grad_norm": 1.8106725215911865, + "learning_rate": 5e-05, + "epoch": 0.38579752833450726, + "step": 6025 + }, + { + "loss": 2.2828, + "grad_norm": 1.7711716890335083, + "learning_rate": 5e-05, + "epoch": 0.38611769225843634, + "step": 6030 + }, + { + "loss": 2.259, + "grad_norm": 1.7112571001052856, + "learning_rate": 5e-05, + "epoch": 0.38643785618236537, + "step": 6035 + }, + { + "loss": 2.2748, + "grad_norm": 1.7668615579605103, + "learning_rate": 5e-05, + "epoch": 0.3867580201062944, + "step": 6040 + }, + { + "loss": 2.2853, + "grad_norm": 1.67672598361969, + "learning_rate": 5e-05, + "epoch": 0.3870781840302235, + "step": 6045 + }, + { + "loss": 2.2871, + "grad_norm": 1.7809470891952515, + "learning_rate": 5e-05, + "epoch": 0.3873983479541525, + "step": 6050 + }, + { + "loss": 2.2709, + "grad_norm": 1.785502552986145, + "learning_rate": 5e-05, + "epoch": 0.3877185118780816, + "step": 6055 + }, + { + "loss": 2.2826, + "grad_norm": 1.725252389907837, + "learning_rate": 5e-05, + "epoch": 0.3880386758020106, + "step": 6060 + }, + { + "loss": 2.2652, + "grad_norm": 1.7655749320983887, + "learning_rate": 5e-05, + "epoch": 0.3883588397259397, + "step": 6065 + }, + { + "loss": 2.2745, + "grad_norm": 1.845263123512268, + "learning_rate": 5e-05, + "epoch": 0.38867900364986874, + "step": 6070 + }, + { + "loss": 2.2355, + "grad_norm": 1.7035220861434937, + "learning_rate": 5e-05, + "epoch": 0.38899916757379777, + "step": 6075 + }, + { + "loss": 2.2324, + "grad_norm": 1.7222847938537598, + "learning_rate": 5e-05, + "epoch": 0.38931933149772685, + "step": 6080 + }, + { + "loss": 2.2685, + "grad_norm": 1.8165398836135864, + "learning_rate": 5e-05, + "epoch": 0.3896394954216559, + "step": 6085 + }, + { + "loss": 2.2682, + "grad_norm": 1.6270705461502075, + "learning_rate": 5e-05, + "epoch": 0.38995965934558496, + "step": 6090 + }, + { + "loss": 2.2508, + "grad_norm": 1.7037124633789062, + "learning_rate": 5e-05, + "epoch": 0.390279823269514, + "step": 6095 + }, + { + "loss": 2.2543, + "grad_norm": 1.648330569267273, + "learning_rate": 5e-05, + "epoch": 0.390599987193443, 
+ "step": 6100 + }, + { + "loss": 2.2578, + "grad_norm": 1.7594226598739624, + "learning_rate": 5e-05, + "epoch": 0.3909201511173721, + "step": 6105 + }, + { + "loss": 2.223, + "grad_norm": 1.8519033193588257, + "learning_rate": 5e-05, + "epoch": 0.39124031504130113, + "step": 6110 + }, + { + "loss": 2.239, + "grad_norm": 1.7203348875045776, + "learning_rate": 5e-05, + "epoch": 0.3915604789652302, + "step": 6115 + }, + { + "loss": 2.2772, + "grad_norm": 1.6320827007293701, + "learning_rate": 5e-05, + "epoch": 0.39188064288915925, + "step": 6120 + }, + { + "loss": 2.2555, + "grad_norm": 1.7894231081008911, + "learning_rate": 5e-05, + "epoch": 0.3922008068130883, + "step": 6125 + }, + { + "loss": 2.2534, + "grad_norm": 1.8454432487487793, + "learning_rate": 5e-05, + "epoch": 0.39252097073701736, + "step": 6130 + }, + { + "loss": 2.2645, + "grad_norm": 1.7246161699295044, + "learning_rate": 5e-05, + "epoch": 0.3928411346609464, + "step": 6135 + }, + { + "loss": 2.2586, + "grad_norm": 1.765830159187317, + "learning_rate": 5e-05, + "epoch": 0.3931612985848755, + "step": 6140 + }, + { + "loss": 2.2669, + "grad_norm": 1.6727474927902222, + "learning_rate": 5e-05, + "epoch": 0.3934814625088045, + "step": 6145 + }, + { + "loss": 2.2517, + "grad_norm": 1.69596529006958, + "learning_rate": 5e-05, + "epoch": 0.3938016264327336, + "step": 6150 + }, + { + "loss": 2.2718, + "grad_norm": 1.6889044046401978, + "learning_rate": 5e-05, + "epoch": 0.3941217903566626, + "step": 6155 + }, + { + "loss": 2.2556, + "grad_norm": 1.627752661705017, + "learning_rate": 5e-05, + "epoch": 0.39444195428059164, + "step": 6160 + }, + { + "loss": 2.2455, + "grad_norm": 1.7724320888519287, + "learning_rate": 5e-05, + "epoch": 0.39476211820452073, + "step": 6165 + }, + { + "loss": 2.2798, + "grad_norm": 1.7310431003570557, + "learning_rate": 5e-05, + "epoch": 0.39508228212844976, + "step": 6170 + }, + { + "loss": 2.24, + "grad_norm": 1.7662495374679565, + "learning_rate": 5e-05, + "epoch": 0.39540244605237884, + "step": 6175 + }, + { + "loss": 2.2278, + "grad_norm": 1.6548069715499878, + "learning_rate": 5e-05, + "epoch": 0.39572260997630787, + "step": 6180 + }, + { + "loss": 2.2691, + "grad_norm": 1.7314773797988892, + "learning_rate": 5e-05, + "epoch": 0.3960427739002369, + "step": 6185 + }, + { + "loss": 2.2485, + "grad_norm": 1.737084984779358, + "learning_rate": 5e-05, + "epoch": 0.396362937824166, + "step": 6190 + }, + { + "loss": 2.2649, + "grad_norm": 1.7657032012939453, + "learning_rate": 5e-05, + "epoch": 0.396683101748095, + "step": 6195 + }, + { + "loss": 2.2166, + "grad_norm": 1.7243016958236694, + "learning_rate": 5e-05, + "epoch": 0.3970032656720241, + "step": 6200 + }, + { + "eval_loss": 2.1302952766418457, + "eval_runtime": 13.3134, + "eval_samples_per_second": 153.83, + "eval_steps_per_second": 19.229, + "epoch": 0.3970032656720241, + "step": 6200 + }, + { + "loss": 2.2638, + "grad_norm": 1.847778081893921, + "learning_rate": 5e-05, + "epoch": 0.3973234295959531, + "step": 6205 + }, + { + "loss": 2.2624, + "grad_norm": 1.7355600595474243, + "learning_rate": 5e-05, + "epoch": 0.39764359351988215, + "step": 6210 + }, + { + "loss": 2.2611, + "grad_norm": 1.7112786769866943, + "learning_rate": 5e-05, + "epoch": 0.39796375744381124, + "step": 6215 + }, + { + "loss": 2.2695, + "grad_norm": 1.7076542377471924, + "learning_rate": 5e-05, + "epoch": 0.39828392136774027, + "step": 6220 + }, + { + "loss": 2.2448, + "grad_norm": 1.7070058584213257, + "learning_rate": 5e-05, + "epoch": 0.39860408529166935, + "step": 6225 
+ }, + { + "loss": 2.2445, + "grad_norm": 1.7254059314727783, + "learning_rate": 5e-05, + "epoch": 0.3989242492155984, + "step": 6230 + }, + { + "loss": 2.2435, + "grad_norm": 1.7010166645050049, + "learning_rate": 5e-05, + "epoch": 0.39924441313952747, + "step": 6235 + }, + { + "loss": 2.2417, + "grad_norm": 1.7349189519882202, + "learning_rate": 5e-05, + "epoch": 0.3995645770634565, + "step": 6240 + }, + { + "loss": 2.2544, + "grad_norm": 1.812296748161316, + "learning_rate": 5e-05, + "epoch": 0.3998847409873855, + "step": 6245 + }, + { + "loss": 2.2425, + "grad_norm": 1.7517497539520264, + "learning_rate": 5e-05, + "epoch": 0.4002049049113146, + "step": 6250 + }, + { + "loss": 2.2405, + "grad_norm": 1.7381399869918823, + "learning_rate": 5e-05, + "epoch": 0.40052506883524364, + "step": 6255 + }, + { + "loss": 2.2862, + "grad_norm": 1.7130184173583984, + "learning_rate": 5e-05, + "epoch": 0.4008452327591727, + "step": 6260 + }, + { + "loss": 2.2525, + "grad_norm": 1.766489028930664, + "learning_rate": 5e-05, + "epoch": 0.40116539668310175, + "step": 6265 + }, + { + "loss": 2.2372, + "grad_norm": 1.6739976406097412, + "learning_rate": 5e-05, + "epoch": 0.4014855606070308, + "step": 6270 + }, + { + "loss": 2.2145, + "grad_norm": 1.6726338863372803, + "learning_rate": 5e-05, + "epoch": 0.40180572453095986, + "step": 6275 + }, + { + "loss": 2.2622, + "grad_norm": 1.8587597608566284, + "learning_rate": 5e-05, + "epoch": 0.4021258884548889, + "step": 6280 + }, + { + "loss": 2.252, + "grad_norm": 1.7975720167160034, + "learning_rate": 5e-05, + "epoch": 0.402446052378818, + "step": 6285 + }, + { + "loss": 2.2645, + "grad_norm": 1.6847093105316162, + "learning_rate": 5e-05, + "epoch": 0.402766216302747, + "step": 6290 + }, + { + "loss": 2.213, + "grad_norm": 1.6540334224700928, + "learning_rate": 5e-05, + "epoch": 0.40308638022667603, + "step": 6295 + }, + { + "loss": 2.2244, + "grad_norm": 1.7274028062820435, + "learning_rate": 5e-05, + "epoch": 0.4034065441506051, + "step": 6300 + }, + { + "loss": 2.2298, + "grad_norm": 1.738688588142395, + "learning_rate": 5e-05, + "epoch": 0.40372670807453415, + "step": 6305 + }, + { + "loss": 2.2655, + "grad_norm": 1.7383339405059814, + "learning_rate": 5e-05, + "epoch": 0.40404687199846323, + "step": 6310 + }, + { + "loss": 2.2595, + "grad_norm": 1.6844770908355713, + "learning_rate": 5e-05, + "epoch": 0.40436703592239226, + "step": 6315 + }, + { + "loss": 2.2474, + "grad_norm": 1.6770930290222168, + "learning_rate": 5e-05, + "epoch": 0.40468719984632134, + "step": 6320 + }, + { + "loss": 2.2467, + "grad_norm": 1.775538682937622, + "learning_rate": 5e-05, + "epoch": 0.4050073637702504, + "step": 6325 + }, + { + "loss": 2.2238, + "grad_norm": 1.7302964925765991, + "learning_rate": 5e-05, + "epoch": 0.4053275276941794, + "step": 6330 + }, + { + "loss": 2.2545, + "grad_norm": 1.7631667852401733, + "learning_rate": 5e-05, + "epoch": 0.4056476916181085, + "step": 6335 + }, + { + "loss": 2.2462, + "grad_norm": 1.704640507698059, + "learning_rate": 5e-05, + "epoch": 0.4059678555420375, + "step": 6340 + }, + { + "loss": 2.2356, + "grad_norm": 1.849134087562561, + "learning_rate": 5e-05, + "epoch": 0.4062880194659666, + "step": 6345 + }, + { + "loss": 2.2713, + "grad_norm": 1.764115810394287, + "learning_rate": 5e-05, + "epoch": 0.4066081833898956, + "step": 6350 + }, + { + "loss": 2.268, + "grad_norm": 1.653106451034546, + "learning_rate": 5e-05, + "epoch": 0.40692834731382466, + "step": 6355 + }, + { + "loss": 2.2242, + "grad_norm": 1.816038727760315, + 
"learning_rate": 5e-05, + "epoch": 0.40724851123775374, + "step": 6360 + }, + { + "loss": 2.2568, + "grad_norm": 1.6303045749664307, + "learning_rate": 5e-05, + "epoch": 0.40756867516168277, + "step": 6365 + }, + { + "loss": 2.2756, + "grad_norm": 1.6868770122528076, + "learning_rate": 5e-05, + "epoch": 0.40788883908561185, + "step": 6370 + }, + { + "loss": 2.2556, + "grad_norm": 1.711913824081421, + "learning_rate": 5e-05, + "epoch": 0.4082090030095409, + "step": 6375 + }, + { + "loss": 2.2355, + "grad_norm": 1.6835819482803345, + "learning_rate": 5e-05, + "epoch": 0.4085291669334699, + "step": 6380 + }, + { + "loss": 2.2559, + "grad_norm": 1.7608588933944702, + "learning_rate": 5e-05, + "epoch": 0.408849330857399, + "step": 6385 + }, + { + "loss": 2.252, + "grad_norm": 1.6766396760940552, + "learning_rate": 5e-05, + "epoch": 0.409169494781328, + "step": 6390 + }, + { + "loss": 2.2475, + "grad_norm": 1.6831003427505493, + "learning_rate": 5e-05, + "epoch": 0.4094896587052571, + "step": 6395 + }, + { + "loss": 2.2639, + "grad_norm": 1.7193617820739746, + "learning_rate": 5e-05, + "epoch": 0.40980982262918614, + "step": 6400 + }, + { + "eval_loss": 2.11531400680542, + "eval_runtime": 9.5033, + "eval_samples_per_second": 215.505, + "eval_steps_per_second": 26.938, + "epoch": 0.40980982262918614, + "step": 6400 + }, + { + "loss": 2.2536, + "grad_norm": 1.6074965000152588, + "learning_rate": 5e-05, + "epoch": 0.4101299865531152, + "step": 6405 + }, + { + "loss": 2.2417, + "grad_norm": 1.6199990510940552, + "learning_rate": 5e-05, + "epoch": 0.41045015047704425, + "step": 6410 + }, + { + "loss": 2.2629, + "grad_norm": 1.6224853992462158, + "learning_rate": 5e-05, + "epoch": 0.4107703144009733, + "step": 6415 + }, + { + "loss": 2.2491, + "grad_norm": 1.779128909111023, + "learning_rate": 5e-05, + "epoch": 0.41109047832490236, + "step": 6420 + }, + { + "loss": 2.2301, + "grad_norm": 1.7013006210327148, + "learning_rate": 5e-05, + "epoch": 0.4114106422488314, + "step": 6425 + }, + { + "loss": 2.2517, + "grad_norm": 1.745300531387329, + "learning_rate": 5e-05, + "epoch": 0.4117308061727605, + "step": 6430 + }, + { + "loss": 2.2743, + "grad_norm": 1.670337438583374, + "learning_rate": 5e-05, + "epoch": 0.4120509700966895, + "step": 6435 + }, + { + "loss": 2.2511, + "grad_norm": 1.7760534286499023, + "learning_rate": 5e-05, + "epoch": 0.41237113402061853, + "step": 6440 + }, + { + "loss": 2.2577, + "grad_norm": 1.7097136974334717, + "learning_rate": 5e-05, + "epoch": 0.4126912979445476, + "step": 6445 + }, + { + "loss": 2.2357, + "grad_norm": 1.738032341003418, + "learning_rate": 5e-05, + "epoch": 0.41301146186847665, + "step": 6450 + }, + { + "loss": 2.2212, + "grad_norm": 1.6849381923675537, + "learning_rate": 5e-05, + "epoch": 0.41333162579240573, + "step": 6455 + }, + { + "loss": 2.233, + "grad_norm": 1.8453466892242432, + "learning_rate": 5e-05, + "epoch": 0.41365178971633476, + "step": 6460 + }, + { + "loss": 2.2687, + "grad_norm": 1.7124505043029785, + "learning_rate": 5e-05, + "epoch": 0.4139719536402638, + "step": 6465 + }, + { + "loss": 2.2342, + "grad_norm": 1.730618953704834, + "learning_rate": 5e-05, + "epoch": 0.4142921175641929, + "step": 6470 + }, + { + "loss": 2.2618, + "grad_norm": 1.7143526077270508, + "learning_rate": 5e-05, + "epoch": 0.4146122814881219, + "step": 6475 + }, + { + "loss": 2.267, + "grad_norm": 1.7568591833114624, + "learning_rate": 5e-05, + "epoch": 0.414932445412051, + "step": 6480 + }, + { + "loss": 2.2626, + "grad_norm": 1.663020372390747, + "learning_rate": 
5e-05, + "epoch": 0.41525260933598, + "step": 6485 + }, + { + "loss": 2.2408, + "grad_norm": 1.6989688873291016, + "learning_rate": 5e-05, + "epoch": 0.4155727732599091, + "step": 6490 + }, + { + "loss": 2.2769, + "grad_norm": 1.648116946220398, + "learning_rate": 5e-05, + "epoch": 0.41589293718383813, + "step": 6495 + }, + { + "loss": 2.2234, + "grad_norm": 1.7310383319854736, + "learning_rate": 5e-05, + "epoch": 0.41621310110776716, + "step": 6500 + }, + { + "loss": 2.2395, + "grad_norm": 1.7397419214248657, + "learning_rate": 5e-05, + "epoch": 0.41653326503169624, + "step": 6505 + }, + { + "loss": 2.237, + "grad_norm": 1.7233692407608032, + "learning_rate": 5e-05, + "epoch": 0.41685342895562527, + "step": 6510 + }, + { + "loss": 2.2458, + "grad_norm": 1.7460954189300537, + "learning_rate": 5e-05, + "epoch": 0.41717359287955436, + "step": 6515 + }, + { + "loss": 2.2398, + "grad_norm": 1.770958423614502, + "learning_rate": 5e-05, + "epoch": 0.4174937568034834, + "step": 6520 + }, + { + "loss": 2.2217, + "grad_norm": 1.7674636840820312, + "learning_rate": 5e-05, + "epoch": 0.4178139207274124, + "step": 6525 + }, + { + "loss": 2.2629, + "grad_norm": 1.7418832778930664, + "learning_rate": 5e-05, + "epoch": 0.4181340846513415, + "step": 6530 + }, + { + "loss": 2.2547, + "grad_norm": 1.6848324537277222, + "learning_rate": 5e-05, + "epoch": 0.4184542485752705, + "step": 6535 + }, + { + "loss": 2.2029, + "grad_norm": 1.698730707168579, + "learning_rate": 5e-05, + "epoch": 0.4187744124991996, + "step": 6540 + }, + { + "loss": 2.2331, + "grad_norm": 1.7850102186203003, + "learning_rate": 5e-05, + "epoch": 0.41909457642312864, + "step": 6545 + }, + { + "loss": 2.2741, + "grad_norm": 1.8512533903121948, + "learning_rate": 5e-05, + "epoch": 0.41941474034705767, + "step": 6550 + }, + { + "loss": 2.2612, + "grad_norm": 1.7491039037704468, + "learning_rate": 5e-05, + "epoch": 0.41973490427098675, + "step": 6555 + }, + { + "loss": 2.2617, + "grad_norm": 1.7620813846588135, + "learning_rate": 5e-05, + "epoch": 0.4200550681949158, + "step": 6560 + }, + { + "loss": 2.2338, + "grad_norm": 1.7340549230575562, + "learning_rate": 5e-05, + "epoch": 0.42037523211884487, + "step": 6565 + }, + { + "loss": 2.2702, + "grad_norm": 1.6962077617645264, + "learning_rate": 5e-05, + "epoch": 0.4206953960427739, + "step": 6570 + }, + { + "loss": 2.2338, + "grad_norm": 1.6991527080535889, + "learning_rate": 5e-05, + "epoch": 0.421015559966703, + "step": 6575 + }, + { + "loss": 2.2522, + "grad_norm": 1.74476158618927, + "learning_rate": 5e-05, + "epoch": 0.421335723890632, + "step": 6580 + }, + { + "loss": 2.2406, + "grad_norm": 1.763519287109375, + "learning_rate": 5e-05, + "epoch": 0.42165588781456104, + "step": 6585 + }, + { + "loss": 2.2464, + "grad_norm": 1.675957202911377, + "learning_rate": 5e-05, + "epoch": 0.4219760517384901, + "step": 6590 + }, + { + "loss": 2.2329, + "grad_norm": 1.7178364992141724, + "learning_rate": 5e-05, + "epoch": 0.42229621566241915, + "step": 6595 + }, + { + "loss": 2.25, + "grad_norm": 1.843867301940918, + "learning_rate": 5e-05, + "epoch": 0.42261637958634823, + "step": 6600 + }, + { + "eval_loss": 2.110640048980713, + "eval_runtime": 9.6155, + "eval_samples_per_second": 212.989, + "eval_steps_per_second": 26.624, + "epoch": 0.42261637958634823, + "step": 6600 + }, + { + "loss": 2.2297, + "grad_norm": 1.850877046585083, + "learning_rate": 5e-05, + "epoch": 0.42293654351027726, + "step": 6605 + }, + { + "loss": 2.2479, + "grad_norm": 1.7398591041564941, + "learning_rate": 5e-05, + "epoch": 
0.4232567074342063, + "step": 6610 + }, + { + "loss": 2.2746, + "grad_norm": 1.7509093284606934, + "learning_rate": 5e-05, + "epoch": 0.4235768713581354, + "step": 6615 + }, + { + "loss": 2.2588, + "grad_norm": 1.7495396137237549, + "learning_rate": 5e-05, + "epoch": 0.4238970352820644, + "step": 6620 + }, + { + "loss": 2.2274, + "grad_norm": 1.6394826173782349, + "learning_rate": 5e-05, + "epoch": 0.4242171992059935, + "step": 6625 + }, + { + "loss": 2.2229, + "grad_norm": 1.7712039947509766, + "learning_rate": 5e-05, + "epoch": 0.4245373631299225, + "step": 6630 + }, + { + "loss": 2.2761, + "grad_norm": 1.6803395748138428, + "learning_rate": 5e-05, + "epoch": 0.42485752705385155, + "step": 6635 + }, + { + "loss": 2.2622, + "grad_norm": 1.7649556398391724, + "learning_rate": 5e-05, + "epoch": 0.42517769097778063, + "step": 6640 + }, + { + "loss": 2.246, + "grad_norm": 1.8420475721359253, + "learning_rate": 5e-05, + "epoch": 0.42549785490170966, + "step": 6645 + }, + { + "loss": 2.2621, + "grad_norm": 1.8346716165542603, + "learning_rate": 5e-05, + "epoch": 0.42581801882563874, + "step": 6650 + }, + { + "loss": 2.2548, + "grad_norm": 1.6930170059204102, + "learning_rate": 5e-05, + "epoch": 0.4261381827495678, + "step": 6655 + }, + { + "loss": 2.2308, + "grad_norm": 1.7325392961502075, + "learning_rate": 5e-05, + "epoch": 0.42645834667349686, + "step": 6660 + }, + { + "loss": 2.2261, + "grad_norm": 1.6914280652999878, + "learning_rate": 5e-05, + "epoch": 0.4267785105974259, + "step": 6665 + }, + { + "loss": 2.2445, + "grad_norm": 1.7235634326934814, + "learning_rate": 5e-05, + "epoch": 0.4270986745213549, + "step": 6670 + }, + { + "loss": 2.2277, + "grad_norm": 1.6718071699142456, + "learning_rate": 5e-05, + "epoch": 0.427418838445284, + "step": 6675 + }, + { + "loss": 2.245, + "grad_norm": 1.6824864149093628, + "learning_rate": 5e-05, + "epoch": 0.42773900236921303, + "step": 6680 + }, + { + "loss": 2.2075, + "grad_norm": 1.6548774242401123, + "learning_rate": 5e-05, + "epoch": 0.4280591662931421, + "step": 6685 + }, + { + "loss": 2.2419, + "grad_norm": 1.6627106666564941, + "learning_rate": 5e-05, + "epoch": 0.42837933021707114, + "step": 6690 + }, + { + "loss": 2.2613, + "grad_norm": 1.6999781131744385, + "learning_rate": 5e-05, + "epoch": 0.42869949414100017, + "step": 6695 + }, + { + "loss": 2.2318, + "grad_norm": 1.8439428806304932, + "learning_rate": 5e-05, + "epoch": 0.42901965806492925, + "step": 6700 + }, + { + "loss": 2.2611, + "grad_norm": 1.7762128114700317, + "learning_rate": 5e-05, + "epoch": 0.4293398219888583, + "step": 6705 + }, + { + "loss": 2.2322, + "grad_norm": 1.7934534549713135, + "learning_rate": 5e-05, + "epoch": 0.42965998591278737, + "step": 6710 + }, + { + "loss": 2.2503, + "grad_norm": 1.6476908922195435, + "learning_rate": 5e-05, + "epoch": 0.4299801498367164, + "step": 6715 + }, + { + "loss": 2.2723, + "grad_norm": 1.757597804069519, + "learning_rate": 5e-05, + "epoch": 0.4303003137606454, + "step": 6720 + }, + { + "loss": 2.2294, + "grad_norm": 1.6976431608200073, + "learning_rate": 5e-05, + "epoch": 0.4306204776845745, + "step": 6725 + }, + { + "loss": 2.2551, + "grad_norm": 1.81328284740448, + "learning_rate": 5e-05, + "epoch": 0.43094064160850354, + "step": 6730 + }, + { + "loss": 2.2393, + "grad_norm": 1.7719358205795288, + "learning_rate": 5e-05, + "epoch": 0.4312608055324326, + "step": 6735 + }, + { + "loss": 2.2426, + "grad_norm": 1.8044530153274536, + "learning_rate": 5e-05, + "epoch": 0.43158096945636165, + "step": 6740 + }, + { + "loss": 2.2435, + 
"grad_norm": 1.760985255241394, + "learning_rate": 5e-05, + "epoch": 0.43190113338029074, + "step": 6745 + }, + { + "loss": 2.2433, + "grad_norm": 1.7239030599594116, + "learning_rate": 5e-05, + "epoch": 0.43222129730421976, + "step": 6750 + }, + { + "loss": 2.2013, + "grad_norm": 1.7211287021636963, + "learning_rate": 5e-05, + "epoch": 0.4325414612281488, + "step": 6755 + }, + { + "loss": 2.2658, + "grad_norm": 1.7456564903259277, + "learning_rate": 5e-05, + "epoch": 0.4328616251520779, + "step": 6760 + }, + { + "loss": 2.2225, + "grad_norm": 1.7644805908203125, + "learning_rate": 5e-05, + "epoch": 0.4331817890760069, + "step": 6765 + }, + { + "loss": 2.2455, + "grad_norm": 1.6574170589447021, + "learning_rate": 5e-05, + "epoch": 0.433501952999936, + "step": 6770 + }, + { + "loss": 2.2565, + "grad_norm": 1.673085331916809, + "learning_rate": 5e-05, + "epoch": 0.433822116923865, + "step": 6775 + }, + { + "loss": 2.2561, + "grad_norm": 1.7815220355987549, + "learning_rate": 5e-05, + "epoch": 0.43414228084779405, + "step": 6780 + }, + { + "loss": 2.2375, + "grad_norm": 1.7830764055252075, + "learning_rate": 5e-05, + "epoch": 0.43446244477172313, + "step": 6785 + }, + { + "loss": 2.2547, + "grad_norm": 1.7634798288345337, + "learning_rate": 5e-05, + "epoch": 0.43478260869565216, + "step": 6790 + }, + { + "loss": 2.2365, + "grad_norm": 1.771299123764038, + "learning_rate": 5e-05, + "epoch": 0.43510277261958125, + "step": 6795 + }, + { + "loss": 2.2699, + "grad_norm": 1.708333134651184, + "learning_rate": 5e-05, + "epoch": 0.4354229365435103, + "step": 6800 + }, + { + "eval_loss": 2.0987234115600586, + "eval_runtime": 9.3179, + "eval_samples_per_second": 219.792, + "eval_steps_per_second": 27.474, + "epoch": 0.4354229365435103, + "step": 6800 + }, + { + "loss": 2.2188, + "grad_norm": 1.8107043504714966, + "learning_rate": 5e-05, + "epoch": 0.4357431004674393, + "step": 6805 + }, + { + "loss": 2.2213, + "grad_norm": 1.707737922668457, + "learning_rate": 5e-05, + "epoch": 0.4360632643913684, + "step": 6810 + }, + { + "loss": 2.261, + "grad_norm": 1.8159151077270508, + "learning_rate": 5e-05, + "epoch": 0.4363834283152974, + "step": 6815 + }, + { + "loss": 2.2857, + "grad_norm": 1.6932034492492676, + "learning_rate": 5e-05, + "epoch": 0.4367035922392265, + "step": 6820 + }, + { + "loss": 2.222, + "grad_norm": 1.8024814128875732, + "learning_rate": 5e-05, + "epoch": 0.43702375616315553, + "step": 6825 + }, + { + "loss": 2.2387, + "grad_norm": 1.7472243309020996, + "learning_rate": 5e-05, + "epoch": 0.4373439200870846, + "step": 6830 + }, + { + "loss": 2.2135, + "grad_norm": 1.7393558025360107, + "learning_rate": 5e-05, + "epoch": 0.43766408401101364, + "step": 6835 + }, + { + "loss": 2.2406, + "grad_norm": 1.8635519742965698, + "learning_rate": 5e-05, + "epoch": 0.43798424793494267, + "step": 6840 + }, + { + "loss": 2.2176, + "grad_norm": 1.757818579673767, + "learning_rate": 5e-05, + "epoch": 0.43830441185887176, + "step": 6845 + }, + { + "loss": 2.2769, + "grad_norm": 1.670522928237915, + "learning_rate": 5e-05, + "epoch": 0.4386245757828008, + "step": 6850 + }, + { + "loss": 2.2687, + "grad_norm": 1.708299160003662, + "learning_rate": 5e-05, + "epoch": 0.43894473970672987, + "step": 6855 + }, + { + "loss": 2.2174, + "grad_norm": 1.6819125413894653, + "learning_rate": 5e-05, + "epoch": 0.4392649036306589, + "step": 6860 + }, + { + "loss": 2.2336, + "grad_norm": 1.7067598104476929, + "learning_rate": 5e-05, + "epoch": 0.4395850675545879, + "step": 6865 + }, + { + "loss": 2.2244, + "grad_norm": 
1.6839826107025146, + "learning_rate": 5e-05, + "epoch": 0.439905231478517, + "step": 6870 + }, + { + "loss": 2.2274, + "grad_norm": 1.8001630306243896, + "learning_rate": 5e-05, + "epoch": 0.44022539540244604, + "step": 6875 + }, + { + "loss": 2.2402, + "grad_norm": 1.7565686702728271, + "learning_rate": 5e-05, + "epoch": 0.4405455593263751, + "step": 6880 + }, + { + "loss": 2.2296, + "grad_norm": 1.6886423826217651, + "learning_rate": 5e-05, + "epoch": 0.44086572325030415, + "step": 6885 + }, + { + "loss": 2.2304, + "grad_norm": 1.713010311126709, + "learning_rate": 5e-05, + "epoch": 0.4411858871742332, + "step": 6890 + }, + { + "loss": 2.2443, + "grad_norm": 1.6640557050704956, + "learning_rate": 5e-05, + "epoch": 0.44150605109816227, + "step": 6895 + }, + { + "loss": 2.223, + "grad_norm": 1.6528607606887817, + "learning_rate": 5e-05, + "epoch": 0.4418262150220913, + "step": 6900 + }, + { + "loss": 2.2264, + "grad_norm": 1.6679795980453491, + "learning_rate": 5e-05, + "epoch": 0.4421463789460204, + "step": 6905 + }, + { + "loss": 2.2393, + "grad_norm": 1.7212443351745605, + "learning_rate": 5e-05, + "epoch": 0.4424665428699494, + "step": 6910 + }, + { + "loss": 2.236, + "grad_norm": 1.671025037765503, + "learning_rate": 5e-05, + "epoch": 0.4427867067938785, + "step": 6915 + }, + { + "loss": 2.2512, + "grad_norm": 1.6580772399902344, + "learning_rate": 5e-05, + "epoch": 0.4431068707178075, + "step": 6920 + }, + { + "loss": 2.2237, + "grad_norm": 1.8094478845596313, + "learning_rate": 5e-05, + "epoch": 0.44342703464173655, + "step": 6925 + }, + { + "loss": 2.2487, + "grad_norm": 1.6925034523010254, + "learning_rate": 5e-05, + "epoch": 0.44374719856566563, + "step": 6930 + }, + { + "loss": 2.2323, + "grad_norm": 1.6939678192138672, + "learning_rate": 5e-05, + "epoch": 0.44406736248959466, + "step": 6935 + }, + { + "loss": 2.1992, + "grad_norm": 1.750412940979004, + "learning_rate": 5e-05, + "epoch": 0.44438752641352375, + "step": 6940 + }, + { + "loss": 2.2182, + "grad_norm": 1.6810964345932007, + "learning_rate": 5e-05, + "epoch": 0.4447076903374528, + "step": 6945 + }, + { + "loss": 2.26, + "grad_norm": 1.6222447156906128, + "learning_rate": 5e-05, + "epoch": 0.4450278542613818, + "step": 6950 + }, + { + "loss": 2.237, + "grad_norm": 1.71504545211792, + "learning_rate": 5e-05, + "epoch": 0.4453480181853109, + "step": 6955 + }, + { + "loss": 2.2384, + "grad_norm": 1.7647539377212524, + "learning_rate": 5e-05, + "epoch": 0.4456681821092399, + "step": 6960 + }, + { + "loss": 2.2321, + "grad_norm": 1.815050482749939, + "learning_rate": 5e-05, + "epoch": 0.445988346033169, + "step": 6965 + }, + { + "loss": 2.2339, + "grad_norm": 1.8233994245529175, + "learning_rate": 5e-05, + "epoch": 0.44630850995709803, + "step": 6970 + }, + { + "loss": 2.2443, + "grad_norm": 1.7368268966674805, + "learning_rate": 5e-05, + "epoch": 0.44662867388102706, + "step": 6975 + }, + { + "loss": 2.2406, + "grad_norm": 1.6867866516113281, + "learning_rate": 5e-05, + "epoch": 0.44694883780495615, + "step": 6980 + }, + { + "loss": 2.2212, + "grad_norm": 1.633429765701294, + "learning_rate": 5e-05, + "epoch": 0.4472690017288852, + "step": 6985 + }, + { + "loss": 2.2543, + "grad_norm": 1.6579304933547974, + "learning_rate": 5e-05, + "epoch": 0.44758916565281426, + "step": 6990 + }, + { + "loss": 2.2339, + "grad_norm": 1.6452136039733887, + "learning_rate": 5e-05, + "epoch": 0.4479093295767433, + "step": 6995 + }, + { + "loss": 2.23, + "grad_norm": 1.670894980430603, + "learning_rate": 5e-05, + "epoch": 
0.44822949350067237, + "step": 7000 + }, + { + "eval_loss": 2.09128475189209, + "eval_runtime": 9.5706, + "eval_samples_per_second": 213.988, + "eval_steps_per_second": 26.749, + "epoch": 0.44822949350067237, + "step": 7000 + }, + { + "loss": 2.2297, + "grad_norm": 1.6882041692733765, + "learning_rate": 5e-05, + "epoch": 0.4485496574246014, + "step": 7005 + }, + { + "loss": 2.217, + "grad_norm": 1.721706748008728, + "learning_rate": 5e-05, + "epoch": 0.44886982134853043, + "step": 7010 + }, + { + "loss": 2.2618, + "grad_norm": 1.7233175039291382, + "learning_rate": 5e-05, + "epoch": 0.4491899852724595, + "step": 7015 + }, + { + "loss": 2.2284, + "grad_norm": 1.7620608806610107, + "learning_rate": 5e-05, + "epoch": 0.44951014919638854, + "step": 7020 + }, + { + "loss": 2.2098, + "grad_norm": 1.692572832107544, + "learning_rate": 5e-05, + "epoch": 0.4498303131203176, + "step": 7025 + }, + { + "loss": 2.2384, + "grad_norm": 1.8099894523620605, + "learning_rate": 5e-05, + "epoch": 0.45015047704424666, + "step": 7030 + }, + { + "loss": 2.2435, + "grad_norm": 1.7223204374313354, + "learning_rate": 5e-05, + "epoch": 0.4504706409681757, + "step": 7035 + }, + { + "loss": 2.2161, + "grad_norm": 1.692357063293457, + "learning_rate": 5e-05, + "epoch": 0.45079080489210477, + "step": 7040 + }, + { + "loss": 2.2449, + "grad_norm": 1.6669970750808716, + "learning_rate": 5e-05, + "epoch": 0.4511109688160338, + "step": 7045 + }, + { + "loss": 2.2364, + "grad_norm": 1.719010353088379, + "learning_rate": 5e-05, + "epoch": 0.4514311327399629, + "step": 7050 + }, + { + "loss": 2.2415, + "grad_norm": 1.6553248167037964, + "learning_rate": 5e-05, + "epoch": 0.4517512966638919, + "step": 7055 + }, + { + "loss": 2.2294, + "grad_norm": 1.6735188961029053, + "learning_rate": 5e-05, + "epoch": 0.45207146058782094, + "step": 7060 + }, + { + "loss": 2.2371, + "grad_norm": 1.6721162796020508, + "learning_rate": 5e-05, + "epoch": 0.45239162451175, + "step": 7065 + }, + { + "loss": 2.2304, + "grad_norm": 1.7263718843460083, + "learning_rate": 5e-05, + "epoch": 0.45271178843567905, + "step": 7070 + }, + { + "loss": 2.2442, + "grad_norm": 1.7064590454101562, + "learning_rate": 5e-05, + "epoch": 0.45303195235960814, + "step": 7075 + }, + { + "loss": 2.2804, + "grad_norm": 1.7297579050064087, + "learning_rate": 5e-05, + "epoch": 0.45335211628353717, + "step": 7080 + }, + { + "loss": 2.213, + "grad_norm": 1.6997263431549072, + "learning_rate": 5e-05, + "epoch": 0.45367228020746625, + "step": 7085 + }, + { + "loss": 2.2262, + "grad_norm": 1.6889290809631348, + "learning_rate": 5e-05, + "epoch": 0.4539924441313953, + "step": 7090 + }, + { + "loss": 2.2396, + "grad_norm": 1.6912401914596558, + "learning_rate": 5e-05, + "epoch": 0.4543126080553243, + "step": 7095 + }, + { + "loss": 2.2471, + "grad_norm": 1.6478922367095947, + "learning_rate": 5e-05, + "epoch": 0.4546327719792534, + "step": 7100 + }, + { + "loss": 2.1881, + "grad_norm": 1.6519252061843872, + "learning_rate": 5e-05, + "epoch": 0.4549529359031824, + "step": 7105 + }, + { + "loss": 2.2228, + "grad_norm": 1.7075591087341309, + "learning_rate": 5e-05, + "epoch": 0.4552730998271115, + "step": 7110 + }, + { + "loss": 2.2266, + "grad_norm": 1.64700448513031, + "learning_rate": 5e-05, + "epoch": 0.45559326375104053, + "step": 7115 + }, + { + "loss": 2.222, + "grad_norm": 1.6712502241134644, + "learning_rate": 5e-05, + "epoch": 0.45591342767496956, + "step": 7120 + }, + { + "loss": 2.2108, + "grad_norm": 1.7444020509719849, + "learning_rate": 5e-05, + "epoch": 
0.45623359159889865, + "step": 7125 + }, + { + "loss": 2.2235, + "grad_norm": 1.739864468574524, + "learning_rate": 5e-05, + "epoch": 0.4565537555228277, + "step": 7130 + }, + { + "loss": 2.2242, + "grad_norm": 1.6936887502670288, + "learning_rate": 5e-05, + "epoch": 0.45687391944675676, + "step": 7135 + }, + { + "loss": 2.2379, + "grad_norm": 1.6458464860916138, + "learning_rate": 5e-05, + "epoch": 0.4571940833706858, + "step": 7140 + }, + { + "loss": 2.2267, + "grad_norm": 1.6865026950836182, + "learning_rate": 5e-05, + "epoch": 0.4575142472946148, + "step": 7145 + }, + { + "loss": 2.2165, + "grad_norm": 1.769681453704834, + "learning_rate": 5e-05, + "epoch": 0.4578344112185439, + "step": 7150 + }, + { + "loss": 2.2495, + "grad_norm": 1.7259198427200317, + "learning_rate": 5e-05, + "epoch": 0.45815457514247293, + "step": 7155 + }, + { + "loss": 2.2434, + "grad_norm": 1.6952046155929565, + "learning_rate": 5e-05, + "epoch": 0.458474739066402, + "step": 7160 + }, + { + "loss": 2.2313, + "grad_norm": 1.7676249742507935, + "learning_rate": 5e-05, + "epoch": 0.45879490299033104, + "step": 7165 + }, + { + "loss": 2.2408, + "grad_norm": 1.6167271137237549, + "learning_rate": 5e-05, + "epoch": 0.45911506691426013, + "step": 7170 + }, + { + "loss": 2.2076, + "grad_norm": 1.7571128606796265, + "learning_rate": 5e-05, + "epoch": 0.45943523083818916, + "step": 7175 + }, + { + "loss": 2.2307, + "grad_norm": 1.619318962097168, + "learning_rate": 5e-05, + "epoch": 0.4597553947621182, + "step": 7180 + }, + { + "loss": 2.2399, + "grad_norm": 1.6977887153625488, + "learning_rate": 5e-05, + "epoch": 0.46007555868604727, + "step": 7185 + }, + { + "loss": 2.2279, + "grad_norm": 1.7176746129989624, + "learning_rate": 5e-05, + "epoch": 0.4603957226099763, + "step": 7190 + }, + { + "loss": 2.2263, + "grad_norm": 1.7717937231063843, + "learning_rate": 5e-05, + "epoch": 0.4607158865339054, + "step": 7195 + }, + { + "loss": 2.2539, + "grad_norm": 1.7532376050949097, + "learning_rate": 5e-05, + "epoch": 0.4610360504578344, + "step": 7200 + }, + { + "eval_loss": 2.105297565460205, + "eval_runtime": 9.2265, + "eval_samples_per_second": 221.969, + "eval_steps_per_second": 27.746, + "epoch": 0.4610360504578344, + "step": 7200 + }, + { + "loss": 2.2402, + "grad_norm": 1.6974916458129883, + "learning_rate": 5e-05, + "epoch": 0.46135621438176344, + "step": 7205 + }, + { + "loss": 2.205, + "grad_norm": 1.703689455986023, + "learning_rate": 5e-05, + "epoch": 0.4616763783056925, + "step": 7210 + }, + { + "loss": 2.2214, + "grad_norm": 1.6790088415145874, + "learning_rate": 5e-05, + "epoch": 0.46199654222962155, + "step": 7215 + }, + { + "loss": 2.2013, + "grad_norm": 1.7056132555007935, + "learning_rate": 5e-05, + "epoch": 0.46231670615355064, + "step": 7220 + }, + { + "loss": 2.2226, + "grad_norm": 1.7162805795669556, + "learning_rate": 5e-05, + "epoch": 0.46263687007747967, + "step": 7225 + }, + { + "loss": 2.2465, + "grad_norm": 1.6653958559036255, + "learning_rate": 5e-05, + "epoch": 0.4629570340014087, + "step": 7230 + }, + { + "loss": 2.2545, + "grad_norm": 1.7309342622756958, + "learning_rate": 5e-05, + "epoch": 0.4632771979253378, + "step": 7235 + }, + { + "loss": 2.2474, + "grad_norm": 1.6637336015701294, + "learning_rate": 5e-05, + "epoch": 0.4635973618492668, + "step": 7240 + }, + { + "loss": 2.227, + "grad_norm": 1.6410720348358154, + "learning_rate": 5e-05, + "epoch": 0.4639175257731959, + "step": 7245 + }, + { + "loss": 2.2268, + "grad_norm": 1.7074912786483765, + "learning_rate": 5e-05, + "epoch": 
0.4642376896971249, + "step": 7250 + }, + { + "loss": 2.212, + "grad_norm": 1.7265816926956177, + "learning_rate": 5e-05, + "epoch": 0.464557853621054, + "step": 7255 + }, + { + "loss": 2.2419, + "grad_norm": 1.7796136140823364, + "learning_rate": 5e-05, + "epoch": 0.46487801754498304, + "step": 7260 + }, + { + "loss": 2.2388, + "grad_norm": 1.748008370399475, + "learning_rate": 5e-05, + "epoch": 0.46519818146891206, + "step": 7265 + }, + { + "loss": 2.2236, + "grad_norm": 1.7407780885696411, + "learning_rate": 5e-05, + "epoch": 0.46551834539284115, + "step": 7270 + }, + { + "loss": 2.2306, + "grad_norm": 1.7564735412597656, + "learning_rate": 5e-05, + "epoch": 0.4658385093167702, + "step": 7275 + }, + { + "loss": 2.246, + "grad_norm": 1.8281434774398804, + "learning_rate": 5e-05, + "epoch": 0.46615867324069926, + "step": 7280 + }, + { + "loss": 2.2127, + "grad_norm": 1.7006824016571045, + "learning_rate": 5e-05, + "epoch": 0.4664788371646283, + "step": 7285 + }, + { + "loss": 2.2081, + "grad_norm": 1.7190055847167969, + "learning_rate": 5e-05, + "epoch": 0.4667990010885573, + "step": 7290 + }, + { + "loss": 2.2096, + "grad_norm": 1.6515896320343018, + "learning_rate": 5e-05, + "epoch": 0.4671191650124864, + "step": 7295 + }, + { + "loss": 2.2523, + "grad_norm": 1.7050222158432007, + "learning_rate": 5e-05, + "epoch": 0.46743932893641543, + "step": 7300 + }, + { + "loss": 2.257, + "grad_norm": 1.8344449996948242, + "learning_rate": 5e-05, + "epoch": 0.4677594928603445, + "step": 7305 + }, + { + "loss": 2.2389, + "grad_norm": 1.762683391571045, + "learning_rate": 5e-05, + "epoch": 0.46807965678427355, + "step": 7310 + }, + { + "loss": 2.2384, + "grad_norm": 1.7812387943267822, + "learning_rate": 5e-05, + "epoch": 0.4683998207082026, + "step": 7315 + }, + { + "loss": 2.2468, + "grad_norm": 1.6442910432815552, + "learning_rate": 5e-05, + "epoch": 0.46871998463213166, + "step": 7320 + }, + { + "loss": 2.1976, + "grad_norm": 1.7651413679122925, + "learning_rate": 5e-05, + "epoch": 0.4690401485560607, + "step": 7325 + }, + { + "loss": 2.2286, + "grad_norm": 1.6484975814819336, + "learning_rate": 5e-05, + "epoch": 0.46936031247998977, + "step": 7330 + }, + { + "loss": 2.2328, + "grad_norm": 1.7871202230453491, + "learning_rate": 5e-05, + "epoch": 0.4696804764039188, + "step": 7335 + }, + { + "loss": 2.2273, + "grad_norm": 1.7313917875289917, + "learning_rate": 5e-05, + "epoch": 0.4700006403278479, + "step": 7340 + }, + { + "loss": 2.2665, + "grad_norm": 1.7805308103561401, + "learning_rate": 5e-05, + "epoch": 0.4703208042517769, + "step": 7345 + }, + { + "loss": 2.2815, + "grad_norm": 1.734679937362671, + "learning_rate": 5e-05, + "epoch": 0.47064096817570594, + "step": 7350 + }, + { + "loss": 2.2212, + "grad_norm": 1.696459412574768, + "learning_rate": 5e-05, + "epoch": 0.470961132099635, + "step": 7355 + }, + { + "loss": 2.2609, + "grad_norm": 1.767733097076416, + "learning_rate": 5e-05, + "epoch": 0.47128129602356406, + "step": 7360 + }, + { + "loss": 2.2074, + "grad_norm": 1.703615665435791, + "learning_rate": 5e-05, + "epoch": 0.47160145994749314, + "step": 7365 + }, + { + "loss": 2.2239, + "grad_norm": 1.6856378316879272, + "learning_rate": 5e-05, + "epoch": 0.47192162387142217, + "step": 7370 + }, + { + "loss": 2.2218, + "grad_norm": 1.75584876537323, + "learning_rate": 5e-05, + "epoch": 0.4722417877953512, + "step": 7375 + }, + { + "loss": 2.2304, + "grad_norm": 1.733469843864441, + "learning_rate": 5e-05, + "epoch": 0.4725619517192803, + "step": 7380 + }, + { + "loss": 2.242, + 
"grad_norm": 1.6238700151443481, + "learning_rate": 5e-05, + "epoch": 0.4728821156432093, + "step": 7385 + }, + { + "loss": 2.2392, + "grad_norm": 1.7466235160827637, + "learning_rate": 5e-05, + "epoch": 0.4732022795671384, + "step": 7390 + }, + { + "loss": 2.2455, + "grad_norm": 1.8809025287628174, + "learning_rate": 5e-05, + "epoch": 0.4735224434910674, + "step": 7395 + }, + { + "loss": 2.2431, + "grad_norm": 1.705507516860962, + "learning_rate": 5e-05, + "epoch": 0.47384260741499645, + "step": 7400 + }, + { + "eval_loss": 2.098146915435791, + "eval_runtime": 9.2837, + "eval_samples_per_second": 220.601, + "eval_steps_per_second": 27.575, + "epoch": 0.47384260741499645, + "step": 7400 + }, + { + "loss": 2.1957, + "grad_norm": 1.7113065719604492, + "learning_rate": 5e-05, + "epoch": 0.47416277133892554, + "step": 7405 + }, + { + "loss": 2.2073, + "grad_norm": 1.6644835472106934, + "learning_rate": 5e-05, + "epoch": 0.47448293526285457, + "step": 7410 + }, + { + "loss": 2.2312, + "grad_norm": 1.6765412092208862, + "learning_rate": 5e-05, + "epoch": 0.47480309918678365, + "step": 7415 + }, + { + "loss": 2.2219, + "grad_norm": 1.6999036073684692, + "learning_rate": 5e-05, + "epoch": 0.4751232631107127, + "step": 7420 + }, + { + "loss": 2.2325, + "grad_norm": 1.6527864933013916, + "learning_rate": 5e-05, + "epoch": 0.47544342703464176, + "step": 7425 + }, + { + "loss": 2.2194, + "grad_norm": 1.6665046215057373, + "learning_rate": 5e-05, + "epoch": 0.4757635909585708, + "step": 7430 + }, + { + "loss": 2.2436, + "grad_norm": 1.6418559551239014, + "learning_rate": 5e-05, + "epoch": 0.4760837548824998, + "step": 7435 + }, + { + "loss": 2.2442, + "grad_norm": 1.6663761138916016, + "learning_rate": 5e-05, + "epoch": 0.4764039188064289, + "step": 7440 + }, + { + "loss": 2.2229, + "grad_norm": 1.7055283784866333, + "learning_rate": 5e-05, + "epoch": 0.47672408273035793, + "step": 7445 + }, + { + "loss": 2.233, + "grad_norm": 1.8025060892105103, + "learning_rate": 5e-05, + "epoch": 0.477044246654287, + "step": 7450 + }, + { + "loss": 2.2237, + "grad_norm": 1.688681721687317, + "learning_rate": 5e-05, + "epoch": 0.47736441057821605, + "step": 7455 + }, + { + "loss": 2.2231, + "grad_norm": 1.7080329656600952, + "learning_rate": 5e-05, + "epoch": 0.4776845745021451, + "step": 7460 + }, + { + "loss": 2.2249, + "grad_norm": 1.6620713472366333, + "learning_rate": 5e-05, + "epoch": 0.47800473842607416, + "step": 7465 + }, + { + "loss": 2.2358, + "grad_norm": 1.7365187406539917, + "learning_rate": 5e-05, + "epoch": 0.4783249023500032, + "step": 7470 + }, + { + "loss": 2.2135, + "grad_norm": 1.6564573049545288, + "learning_rate": 5e-05, + "epoch": 0.4786450662739323, + "step": 7475 + }, + { + "loss": 2.2008, + "grad_norm": 1.6541259288787842, + "learning_rate": 5e-05, + "epoch": 0.4789652301978613, + "step": 7480 + }, + { + "loss": 2.1807, + "grad_norm": 1.7617090940475464, + "learning_rate": 5e-05, + "epoch": 0.47928539412179033, + "step": 7485 + }, + { + "loss": 2.2205, + "grad_norm": 1.6854571104049683, + "learning_rate": 5e-05, + "epoch": 0.4796055580457194, + "step": 7490 + }, + { + "loss": 2.2363, + "grad_norm": 1.7207971811294556, + "learning_rate": 5e-05, + "epoch": 0.47992572196964844, + "step": 7495 + }, + { + "loss": 2.2369, + "grad_norm": 1.7152527570724487, + "learning_rate": 5e-05, + "epoch": 0.48024588589357753, + "step": 7500 + }, + { + "loss": 2.2348, + "grad_norm": 1.6942616701126099, + "learning_rate": 5e-05, + "epoch": 0.48056604981750656, + "step": 7505 + }, + { + "loss": 2.2159, + 
"grad_norm": 1.7268720865249634, + "learning_rate": 5e-05, + "epoch": 0.48088621374143564, + "step": 7510 + }, + { + "loss": 2.2252, + "grad_norm": 1.7245421409606934, + "learning_rate": 5e-05, + "epoch": 0.48120637766536467, + "step": 7515 + }, + { + "loss": 2.235, + "grad_norm": 1.8024479150772095, + "learning_rate": 5e-05, + "epoch": 0.4815265415892937, + "step": 7520 + }, + { + "loss": 2.2198, + "grad_norm": 1.7535227537155151, + "learning_rate": 5e-05, + "epoch": 0.4818467055132228, + "step": 7525 + }, + { + "loss": 2.2312, + "grad_norm": 1.6243641376495361, + "learning_rate": 5e-05, + "epoch": 0.4821668694371518, + "step": 7530 + }, + { + "loss": 2.2252, + "grad_norm": 1.7475508451461792, + "learning_rate": 5e-05, + "epoch": 0.4824870333610809, + "step": 7535 + }, + { + "loss": 2.2565, + "grad_norm": 1.6990846395492554, + "learning_rate": 5e-05, + "epoch": 0.4828071972850099, + "step": 7540 + }, + { + "loss": 2.2315, + "grad_norm": 1.758632779121399, + "learning_rate": 5e-05, + "epoch": 0.48312736120893895, + "step": 7545 + }, + { + "loss": 2.2011, + "grad_norm": 1.6367809772491455, + "learning_rate": 5e-05, + "epoch": 0.48344752513286804, + "step": 7550 + }, + { + "loss": 2.2523, + "grad_norm": 1.667632818222046, + "learning_rate": 5e-05, + "epoch": 0.48376768905679707, + "step": 7555 + }, + { + "loss": 2.2309, + "grad_norm": 1.674497365951538, + "learning_rate": 5e-05, + "epoch": 0.48408785298072615, + "step": 7560 + }, + { + "loss": 2.2254, + "grad_norm": 1.6481330394744873, + "learning_rate": 5e-05, + "epoch": 0.4844080169046552, + "step": 7565 + }, + { + "loss": 2.2089, + "grad_norm": 1.6713910102844238, + "learning_rate": 5e-05, + "epoch": 0.4847281808285842, + "step": 7570 + }, + { + "loss": 2.2481, + "grad_norm": 1.807297945022583, + "learning_rate": 5e-05, + "epoch": 0.4850483447525133, + "step": 7575 + }, + { + "loss": 2.227, + "grad_norm": 1.6840758323669434, + "learning_rate": 5e-05, + "epoch": 0.4853685086764423, + "step": 7580 + }, + { + "loss": 2.2443, + "grad_norm": 1.7010893821716309, + "learning_rate": 5e-05, + "epoch": 0.4856886726003714, + "step": 7585 + }, + { + "loss": 2.2416, + "grad_norm": 1.6932690143585205, + "learning_rate": 5e-05, + "epoch": 0.48600883652430044, + "step": 7590 + }, + { + "loss": 2.2301, + "grad_norm": 1.7321279048919678, + "learning_rate": 5e-05, + "epoch": 0.4863290004482295, + "step": 7595 + }, + { + "loss": 2.2394, + "grad_norm": 1.7051780223846436, + "learning_rate": 5e-05, + "epoch": 0.48664916437215855, + "step": 7600 + }, + { + "eval_loss": 2.1021814346313477, + "eval_runtime": 9.0753, + "eval_samples_per_second": 225.668, + "eval_steps_per_second": 28.208, + "epoch": 0.48664916437215855, + "step": 7600 + }, + { + "loss": 2.2093, + "grad_norm": 1.7261319160461426, + "learning_rate": 5e-05, + "epoch": 0.4869693282960876, + "step": 7605 + }, + { + "loss": 2.2148, + "grad_norm": 1.7780207395553589, + "learning_rate": 5e-05, + "epoch": 0.48728949222001666, + "step": 7610 + }, + { + "loss": 2.217, + "grad_norm": 1.7456703186035156, + "learning_rate": 5e-05, + "epoch": 0.4876096561439457, + "step": 7615 + }, + { + "loss": 2.1884, + "grad_norm": 1.8097208738327026, + "learning_rate": 5e-05, + "epoch": 0.4879298200678748, + "step": 7620 + }, + { + "loss": 2.2064, + "grad_norm": 1.7063500881195068, + "learning_rate": 5e-05, + "epoch": 0.4882499839918038, + "step": 7625 + }, + { + "loss": 2.2059, + "grad_norm": 1.7562440633773804, + "learning_rate": 5e-05, + "epoch": 0.48857014791573283, + "step": 7630 + }, + { + "loss": 2.2136, + 
"grad_norm": 1.7514058351516724, + "learning_rate": 5e-05, + "epoch": 0.4888903118396619, + "step": 7635 + }, + { + "loss": 2.2369, + "grad_norm": 1.7494462728500366, + "learning_rate": 5e-05, + "epoch": 0.48921047576359095, + "step": 7640 + }, + { + "loss": 2.2129, + "grad_norm": 1.7088241577148438, + "learning_rate": 5e-05, + "epoch": 0.48953063968752003, + "step": 7645 + }, + { + "loss": 2.2257, + "grad_norm": 1.6516956090927124, + "learning_rate": 5e-05, + "epoch": 0.48985080361144906, + "step": 7650 + }, + { + "loss": 2.246, + "grad_norm": 1.7086745500564575, + "learning_rate": 5e-05, + "epoch": 0.4901709675353781, + "step": 7655 + }, + { + "loss": 2.2448, + "grad_norm": 1.748744010925293, + "learning_rate": 5e-05, + "epoch": 0.4904911314593072, + "step": 7660 + }, + { + "loss": 2.2262, + "grad_norm": 1.612053632736206, + "learning_rate": 5e-05, + "epoch": 0.4908112953832362, + "step": 7665 + }, + { + "loss": 2.2449, + "grad_norm": 1.7028324604034424, + "learning_rate": 5e-05, + "epoch": 0.4911314593071653, + "step": 7670 + }, + { + "loss": 2.2583, + "grad_norm": 1.735455870628357, + "learning_rate": 5e-05, + "epoch": 0.4914516232310943, + "step": 7675 + }, + { + "loss": 2.209, + "grad_norm": 1.806516170501709, + "learning_rate": 5e-05, + "epoch": 0.4917717871550234, + "step": 7680 + }, + { + "loss": 2.226, + "grad_norm": 1.6990715265274048, + "learning_rate": 5e-05, + "epoch": 0.4920919510789524, + "step": 7685 + }, + { + "loss": 2.2201, + "grad_norm": 1.6955472230911255, + "learning_rate": 5e-05, + "epoch": 0.49241211500288146, + "step": 7690 + }, + { + "loss": 2.2677, + "grad_norm": 1.6222796440124512, + "learning_rate": 5e-05, + "epoch": 0.49273227892681054, + "step": 7695 + }, + { + "loss": 2.2376, + "grad_norm": 1.7312852144241333, + "learning_rate": 5e-05, + "epoch": 0.49305244285073957, + "step": 7700 + }, + { + "loss": 2.2389, + "grad_norm": 1.6851000785827637, + "learning_rate": 5e-05, + "epoch": 0.49337260677466865, + "step": 7705 + }, + { + "loss": 2.2027, + "grad_norm": 1.6476107835769653, + "learning_rate": 5e-05, + "epoch": 0.4936927706985977, + "step": 7710 + }, + { + "loss": 2.2448, + "grad_norm": 1.6625196933746338, + "learning_rate": 5e-05, + "epoch": 0.4940129346225267, + "step": 7715 + }, + { + "loss": 2.1801, + "grad_norm": 1.7839092016220093, + "learning_rate": 5e-05, + "epoch": 0.4943330985464558, + "step": 7720 + }, + { + "loss": 2.2286, + "grad_norm": 1.6703824996948242, + "learning_rate": 5e-05, + "epoch": 0.4946532624703848, + "step": 7725 + }, + { + "loss": 2.1893, + "grad_norm": 1.825608253479004, + "learning_rate": 5e-05, + "epoch": 0.4949734263943139, + "step": 7730 + }, + { + "loss": 2.2228, + "grad_norm": 1.6634159088134766, + "learning_rate": 5e-05, + "epoch": 0.49529359031824294, + "step": 7735 + }, + { + "loss": 2.203, + "grad_norm": 1.6320128440856934, + "learning_rate": 5e-05, + "epoch": 0.49561375424217197, + "step": 7740 + }, + { + "loss": 2.2567, + "grad_norm": 1.6982113122940063, + "learning_rate": 5e-05, + "epoch": 0.49593391816610105, + "step": 7745 + }, + { + "loss": 2.2155, + "grad_norm": 1.5984007120132446, + "learning_rate": 5e-05, + "epoch": 0.4962540820900301, + "step": 7750 + }, + { + "loss": 2.2294, + "grad_norm": 1.7036561965942383, + "learning_rate": 5e-05, + "epoch": 0.49657424601395916, + "step": 7755 + }, + { + "loss": 2.2139, + "grad_norm": 1.7015743255615234, + "learning_rate": 5e-05, + "epoch": 0.4968944099378882, + "step": 7760 + }, + { + "loss": 2.2231, + "grad_norm": 1.7515677213668823, + "learning_rate": 5e-05, + 
"epoch": 0.4972145738618173, + "step": 7765 + }, + { + "loss": 2.2211, + "grad_norm": 1.7809141874313354, + "learning_rate": 5e-05, + "epoch": 0.4975347377857463, + "step": 7770 + }, + { + "loss": 2.2251, + "grad_norm": 1.6579275131225586, + "learning_rate": 5e-05, + "epoch": 0.49785490170967533, + "step": 7775 + }, + { + "loss": 2.2045, + "grad_norm": 1.6126208305358887, + "learning_rate": 5e-05, + "epoch": 0.4981750656336044, + "step": 7780 + }, + { + "loss": 2.1937, + "grad_norm": 1.7193243503570557, + "learning_rate": 5e-05, + "epoch": 0.49849522955753345, + "step": 7785 + }, + { + "loss": 2.2378, + "grad_norm": 1.7689208984375, + "learning_rate": 5e-05, + "epoch": 0.49881539348146253, + "step": 7790 + }, + { + "loss": 2.2021, + "grad_norm": 1.7081634998321533, + "learning_rate": 5e-05, + "epoch": 0.49913555740539156, + "step": 7795 + }, + { + "loss": 2.2286, + "grad_norm": 1.682572841644287, + "learning_rate": 5e-05, + "epoch": 0.4994557213293206, + "step": 7800 + }, + { + "eval_loss": 2.093745231628418, + "eval_runtime": 9.0651, + "eval_samples_per_second": 225.921, + "eval_steps_per_second": 28.24, + "epoch": 0.4994557213293206, + "step": 7800 + }, + { + "loss": 2.2273, + "grad_norm": 1.7163746356964111, + "learning_rate": 5e-05, + "epoch": 0.4997758852532497, + "step": 7805 + }, + { + "loss": 2.2204, + "grad_norm": 1.7658613920211792, + "learning_rate": 5e-05, + "epoch": 0.5000960491771788, + "step": 7810 + }, + { + "loss": 2.2302, + "grad_norm": 1.7190687656402588, + "learning_rate": 5e-05, + "epoch": 0.5004162131011077, + "step": 7815 + }, + { + "loss": 2.2075, + "grad_norm": 1.7589188814163208, + "learning_rate": 5e-05, + "epoch": 0.5007363770250368, + "step": 7820 + }, + { + "loss": 2.2345, + "grad_norm": 1.6196938753128052, + "learning_rate": 5e-05, + "epoch": 0.5010565409489659, + "step": 7825 + }, + { + "loss": 2.2037, + "grad_norm": 1.5979619026184082, + "learning_rate": 5e-05, + "epoch": 0.5013767048728949, + "step": 7830 + }, + { + "loss": 2.2133, + "grad_norm": 1.748252511024475, + "learning_rate": 5e-05, + "epoch": 0.501696868796824, + "step": 7835 + }, + { + "loss": 2.1874, + "grad_norm": 1.6827281713485718, + "learning_rate": 5e-05, + "epoch": 0.502017032720753, + "step": 7840 + }, + { + "loss": 2.2298, + "grad_norm": 1.7303760051727295, + "learning_rate": 5e-05, + "epoch": 0.5023371966446821, + "step": 7845 + }, + { + "loss": 2.2331, + "grad_norm": 1.724907398223877, + "learning_rate": 5e-05, + "epoch": 0.5026573605686111, + "step": 7850 + }, + { + "loss": 2.2103, + "grad_norm": 1.6769534349441528, + "learning_rate": 5e-05, + "epoch": 0.5029775244925402, + "step": 7855 + }, + { + "loss": 2.2251, + "grad_norm": 1.7141268253326416, + "learning_rate": 5e-05, + "epoch": 0.5032976884164693, + "step": 7860 + }, + { + "loss": 2.2383, + "grad_norm": 1.691082239151001, + "learning_rate": 5e-05, + "epoch": 0.5036178523403982, + "step": 7865 + }, + { + "loss": 2.2198, + "grad_norm": 1.7353357076644897, + "learning_rate": 5e-05, + "epoch": 0.5039380162643273, + "step": 7870 + }, + { + "loss": 2.2259, + "grad_norm": 1.6525827646255493, + "learning_rate": 5e-05, + "epoch": 0.5042581801882564, + "step": 7875 + }, + { + "loss": 2.1898, + "grad_norm": 1.592120885848999, + "learning_rate": 5e-05, + "epoch": 0.5045783441121854, + "step": 7880 + }, + { + "loss": 2.2348, + "grad_norm": 1.6837745904922485, + "learning_rate": 5e-05, + "epoch": 0.5048985080361145, + "step": 7885 + }, + { + "loss": 2.1958, + "grad_norm": 1.5835824012756348, + "learning_rate": 5e-05, + "epoch": 
0.5052186719600436, + "step": 7890 + }, + { + "loss": 2.218, + "grad_norm": 1.701811671257019, + "learning_rate": 5e-05, + "epoch": 0.5055388358839726, + "step": 7895 + }, + { + "loss": 2.236, + "grad_norm": 1.6456730365753174, + "learning_rate": 5e-05, + "epoch": 0.5058589998079016, + "step": 7900 + }, + { + "loss": 2.2219, + "grad_norm": 1.8244099617004395, + "learning_rate": 5e-05, + "epoch": 0.5061791637318307, + "step": 7905 + }, + { + "loss": 2.2081, + "grad_norm": 1.7030236721038818, + "learning_rate": 5e-05, + "epoch": 0.5064993276557598, + "step": 7910 + }, + { + "loss": 2.2244, + "grad_norm": 1.7620062828063965, + "learning_rate": 5e-05, + "epoch": 0.5068194915796888, + "step": 7915 + }, + { + "loss": 2.2201, + "grad_norm": 1.6482844352722168, + "learning_rate": 5e-05, + "epoch": 0.5071396555036178, + "step": 7920 + }, + { + "loss": 2.2033, + "grad_norm": 1.6990326642990112, + "learning_rate": 5e-05, + "epoch": 0.5074598194275469, + "step": 7925 + }, + { + "loss": 2.2165, + "grad_norm": 1.8352545499801636, + "learning_rate": 5e-05, + "epoch": 0.507779983351476, + "step": 7930 + }, + { + "loss": 2.2163, + "grad_norm": 1.757162094116211, + "learning_rate": 5e-05, + "epoch": 0.508100147275405, + "step": 7935 + }, + { + "loss": 2.2313, + "grad_norm": 1.762620210647583, + "learning_rate": 5e-05, + "epoch": 0.5084203111993341, + "step": 7940 + }, + { + "loss": 2.2162, + "grad_norm": 1.82100510597229, + "learning_rate": 5e-05, + "epoch": 0.5087404751232631, + "step": 7945 + }, + { + "loss": 2.2278, + "grad_norm": 1.6846513748168945, + "learning_rate": 5e-05, + "epoch": 0.5090606390471921, + "step": 7950 + }, + { + "loss": 2.1958, + "grad_norm": 1.769995093345642, + "learning_rate": 5e-05, + "epoch": 0.5093808029711212, + "step": 7955 + }, + { + "loss": 2.1942, + "grad_norm": 1.7381685972213745, + "learning_rate": 5e-05, + "epoch": 0.5097009668950503, + "step": 7960 + }, + { + "loss": 2.2312, + "grad_norm": 1.723332166671753, + "learning_rate": 5e-05, + "epoch": 0.5100211308189793, + "step": 7965 + }, + { + "loss": 2.2185, + "grad_norm": 1.7073355913162231, + "learning_rate": 5e-05, + "epoch": 0.5103412947429083, + "step": 7970 + }, + { + "loss": 2.2389, + "grad_norm": 1.625958800315857, + "learning_rate": 5e-05, + "epoch": 0.5106614586668374, + "step": 7975 + }, + { + "loss": 2.2267, + "grad_norm": 1.673528790473938, + "learning_rate": 5e-05, + "epoch": 0.5109816225907665, + "step": 7980 + }, + { + "loss": 2.224, + "grad_norm": 1.6753426790237427, + "learning_rate": 5e-05, + "epoch": 0.5113017865146955, + "step": 7985 + }, + { + "loss": 2.2299, + "grad_norm": 1.6783485412597656, + "learning_rate": 5e-05, + "epoch": 0.5116219504386246, + "step": 7990 + }, + { + "loss": 2.2078, + "grad_norm": 1.6852103471755981, + "learning_rate": 5e-05, + "epoch": 0.5119421143625537, + "step": 7995 + }, + { + "loss": 2.2307, + "grad_norm": 1.7385365962982178, + "learning_rate": 5e-05, + "epoch": 0.5122622782864826, + "step": 8000 + }, + { + "eval_loss": 2.0892796516418457, + "eval_runtime": 9.339, + "eval_samples_per_second": 219.294, + "eval_steps_per_second": 27.412, + "epoch": 0.5122622782864826, + "step": 8000 + }, + { + "loss": 2.2291, + "grad_norm": 1.5970470905303955, + "learning_rate": 5e-05, + "epoch": 0.5125824422104117, + "step": 8005 + }, + { + "loss": 2.2039, + "grad_norm": 1.7087442874908447, + "learning_rate": 5e-05, + "epoch": 0.5129026061343408, + "step": 8010 + }, + { + "loss": 2.204, + "grad_norm": 1.634369969367981, + "learning_rate": 5e-05, + "epoch": 0.5132227700582699, + "step": 
8015 + }, + { + "loss": 2.2052, + "grad_norm": 1.6249363422393799, + "learning_rate": 5e-05, + "epoch": 0.5135429339821989, + "step": 8020 + }, + { + "loss": 2.227, + "grad_norm": 1.75498366355896, + "learning_rate": 5e-05, + "epoch": 0.5138630979061279, + "step": 8025 + }, + { + "loss": 2.229, + "grad_norm": 1.7273918390274048, + "learning_rate": 5e-05, + "epoch": 0.514183261830057, + "step": 8030 + }, + { + "loss": 2.2165, + "grad_norm": 1.7545193433761597, + "learning_rate": 5e-05, + "epoch": 0.514503425753986, + "step": 8035 + }, + { + "loss": 2.224, + "grad_norm": 1.73219895362854, + "learning_rate": 5e-05, + "epoch": 0.5148235896779151, + "step": 8040 + }, + { + "loss": 2.2239, + "grad_norm": 1.7364838123321533, + "learning_rate": 5e-05, + "epoch": 0.5151437536018442, + "step": 8045 + }, + { + "loss": 2.2069, + "grad_norm": 1.626757025718689, + "learning_rate": 5e-05, + "epoch": 0.5154639175257731, + "step": 8050 + }, + { + "loss": 2.1969, + "grad_norm": 1.7197450399398804, + "learning_rate": 5e-05, + "epoch": 0.5157840814497022, + "step": 8055 + }, + { + "loss": 2.1923, + "grad_norm": 1.7638471126556396, + "learning_rate": 5e-05, + "epoch": 0.5161042453736313, + "step": 8060 + }, + { + "loss": 2.218, + "grad_norm": 1.6772651672363281, + "learning_rate": 5e-05, + "epoch": 0.5164244092975604, + "step": 8065 + }, + { + "loss": 2.2105, + "grad_norm": 1.707062005996704, + "learning_rate": 5e-05, + "epoch": 0.5167445732214894, + "step": 8070 + }, + { + "loss": 2.235, + "grad_norm": 1.679762601852417, + "learning_rate": 5e-05, + "epoch": 0.5170647371454185, + "step": 8075 + }, + { + "loss": 2.2301, + "grad_norm": 1.6003955602645874, + "learning_rate": 5e-05, + "epoch": 0.5173849010693475, + "step": 8080 + }, + { + "loss": 2.2387, + "grad_norm": 1.7114651203155518, + "learning_rate": 5e-05, + "epoch": 0.5177050649932765, + "step": 8085 + }, + { + "loss": 2.1936, + "grad_norm": 1.6603319644927979, + "learning_rate": 5e-05, + "epoch": 0.5180252289172056, + "step": 8090 + }, + { + "loss": 2.2455, + "grad_norm": 1.806725263595581, + "learning_rate": 5e-05, + "epoch": 0.5183453928411347, + "step": 8095 + }, + { + "loss": 2.2437, + "grad_norm": 1.642220377922058, + "learning_rate": 5e-05, + "epoch": 0.5186655567650638, + "step": 8100 + }, + { + "loss": 2.2011, + "grad_norm": 1.641445279121399, + "learning_rate": 5e-05, + "epoch": 0.5189857206889927, + "step": 8105 + }, + { + "loss": 2.2033, + "grad_norm": 1.741835594177246, + "learning_rate": 5e-05, + "epoch": 0.5193058846129218, + "step": 8110 + }, + { + "loss": 2.2164, + "grad_norm": 1.6442292928695679, + "learning_rate": 5e-05, + "epoch": 0.5196260485368509, + "step": 8115 + }, + { + "loss": 2.1894, + "grad_norm": 1.6900848150253296, + "learning_rate": 5e-05, + "epoch": 0.5199462124607799, + "step": 8120 + }, + { + "loss": 2.1935, + "grad_norm": 1.69651198387146, + "learning_rate": 5e-05, + "epoch": 0.520266376384709, + "step": 8125 + }, + { + "loss": 2.2244, + "grad_norm": 1.6706749200820923, + "learning_rate": 5e-05, + "epoch": 0.520586540308638, + "step": 8130 + }, + { + "loss": 2.2044, + "grad_norm": 1.7208715677261353, + "learning_rate": 5e-05, + "epoch": 0.520906704232567, + "step": 8135 + }, + { + "loss": 2.2312, + "grad_norm": 1.554226040840149, + "learning_rate": 5e-05, + "epoch": 0.5212268681564961, + "step": 8140 + }, + { + "loss": 2.2347, + "grad_norm": 1.7291901111602783, + "learning_rate": 5e-05, + "epoch": 0.5215470320804252, + "step": 8145 + }, + { + "loss": 2.2104, + "grad_norm": 1.6915407180786133, + "learning_rate": 5e-05, + 
"epoch": 0.5218671960043543, + "step": 8150 + }, + { + "loss": 2.1875, + "grad_norm": 1.6418696641921997, + "learning_rate": 5e-05, + "epoch": 0.5221873599282832, + "step": 8155 + }, + { + "loss": 2.2015, + "grad_norm": 1.656497597694397, + "learning_rate": 5e-05, + "epoch": 0.5225075238522123, + "step": 8160 + }, + { + "loss": 2.2165, + "grad_norm": 1.78023362159729, + "learning_rate": 5e-05, + "epoch": 0.5228276877761414, + "step": 8165 + }, + { + "loss": 2.2254, + "grad_norm": 1.690218448638916, + "learning_rate": 5e-05, + "epoch": 0.5231478517000704, + "step": 8170 + }, + { + "loss": 2.2104, + "grad_norm": 1.6391229629516602, + "learning_rate": 5e-05, + "epoch": 0.5234680156239995, + "step": 8175 + }, + { + "loss": 2.194, + "grad_norm": 1.6620936393737793, + "learning_rate": 5e-05, + "epoch": 0.5237881795479286, + "step": 8180 + }, + { + "loss": 2.1981, + "grad_norm": 1.6374820470809937, + "learning_rate": 5e-05, + "epoch": 0.5241083434718576, + "step": 8185 + }, + { + "loss": 2.2037, + "grad_norm": 1.6494983434677124, + "learning_rate": 5e-05, + "epoch": 0.5244285073957866, + "step": 8190 + }, + { + "loss": 2.2126, + "grad_norm": 1.6629283428192139, + "learning_rate": 5e-05, + "epoch": 0.5247486713197157, + "step": 8195 + }, + { + "loss": 2.1914, + "grad_norm": 1.7156426906585693, + "learning_rate": 5e-05, + "epoch": 0.5250688352436448, + "step": 8200 + }, + { + "eval_loss": 2.0739922523498535, + "eval_runtime": 9.5483, + "eval_samples_per_second": 214.489, + "eval_steps_per_second": 26.811, + "epoch": 0.5250688352436448, + "step": 8200 + }, + { + "loss": 2.2416, + "grad_norm": 1.7021827697753906, + "learning_rate": 5e-05, + "epoch": 0.5253889991675738, + "step": 8205 + }, + { + "loss": 2.1897, + "grad_norm": 1.7273354530334473, + "learning_rate": 5e-05, + "epoch": 0.5257091630915028, + "step": 8210 + }, + { + "loss": 2.2093, + "grad_norm": 1.6649446487426758, + "learning_rate": 5e-05, + "epoch": 0.5260293270154319, + "step": 8215 + }, + { + "loss": 2.2231, + "grad_norm": 1.7139078378677368, + "learning_rate": 5e-05, + "epoch": 0.5263494909393609, + "step": 8220 + }, + { + "loss": 2.212, + "grad_norm": 1.7619116306304932, + "learning_rate": 5e-05, + "epoch": 0.52666965486329, + "step": 8225 + }, + { + "loss": 2.2043, + "grad_norm": 1.537295937538147, + "learning_rate": 5e-05, + "epoch": 0.5269898187872191, + "step": 8230 + }, + { + "loss": 2.1928, + "grad_norm": 1.6285382509231567, + "learning_rate": 5e-05, + "epoch": 0.5273099827111482, + "step": 8235 + }, + { + "loss": 2.217, + "grad_norm": 1.673884630203247, + "learning_rate": 5e-05, + "epoch": 0.5276301466350771, + "step": 8240 + }, + { + "loss": 2.2091, + "grad_norm": 1.7069264650344849, + "learning_rate": 5e-05, + "epoch": 0.5279503105590062, + "step": 8245 + }, + { + "loss": 2.2122, + "grad_norm": 1.6686755418777466, + "learning_rate": 5e-05, + "epoch": 0.5282704744829353, + "step": 8250 + }, + { + "loss": 2.2098, + "grad_norm": 1.7338638305664062, + "learning_rate": 5e-05, + "epoch": 0.5285906384068643, + "step": 8255 + }, + { + "loss": 2.1981, + "grad_norm": 1.7347779273986816, + "learning_rate": 5e-05, + "epoch": 0.5289108023307934, + "step": 8260 + }, + { + "loss": 2.2123, + "grad_norm": 1.6788371801376343, + "learning_rate": 5e-05, + "epoch": 0.5292309662547224, + "step": 8265 + }, + { + "loss": 2.2125, + "grad_norm": 1.7118918895721436, + "learning_rate": 5e-05, + "epoch": 0.5295511301786515, + "step": 8270 + }, + { + "loss": 2.2498, + "grad_norm": 1.6705480813980103, + "learning_rate": 5e-05, + "epoch": 
0.5298712941025805, + "step": 8275 + }, + { + "loss": 2.2, + "grad_norm": 1.6503225564956665, + "learning_rate": 5e-05, + "epoch": 0.5301914580265096, + "step": 8280 + }, + { + "loss": 2.2383, + "grad_norm": 1.7550666332244873, + "learning_rate": 5e-05, + "epoch": 0.5305116219504387, + "step": 8285 + }, + { + "loss": 2.1962, + "grad_norm": 1.9004778861999512, + "learning_rate": 5e-05, + "epoch": 0.5308317858743676, + "step": 8290 + }, + { + "loss": 2.19, + "grad_norm": 1.6682841777801514, + "learning_rate": 5e-05, + "epoch": 0.5311519497982967, + "step": 8295 + }, + { + "loss": 2.2168, + "grad_norm": 1.801561951637268, + "learning_rate": 5e-05, + "epoch": 0.5314721137222258, + "step": 8300 + }, + { + "loss": 2.1998, + "grad_norm": 1.621793270111084, + "learning_rate": 5e-05, + "epoch": 0.5317922776461548, + "step": 8305 + }, + { + "loss": 2.2359, + "grad_norm": 1.597138524055481, + "learning_rate": 5e-05, + "epoch": 0.5321124415700839, + "step": 8310 + }, + { + "loss": 2.2064, + "grad_norm": 1.5918503999710083, + "learning_rate": 5e-05, + "epoch": 0.532432605494013, + "step": 8315 + }, + { + "loss": 2.2203, + "grad_norm": 1.6550569534301758, + "learning_rate": 5e-05, + "epoch": 0.532752769417942, + "step": 8320 + }, + { + "loss": 2.2105, + "grad_norm": 1.5995575189590454, + "learning_rate": 5e-05, + "epoch": 0.533072933341871, + "step": 8325 + }, + { + "loss": 2.1901, + "grad_norm": 1.7386360168457031, + "learning_rate": 5e-05, + "epoch": 0.5333930972658001, + "step": 8330 + }, + { + "loss": 2.2097, + "grad_norm": 1.672532320022583, + "learning_rate": 5e-05, + "epoch": 0.5337132611897292, + "step": 8335 + }, + { + "loss": 2.2103, + "grad_norm": 1.6559290885925293, + "learning_rate": 5e-05, + "epoch": 0.5340334251136581, + "step": 8340 + }, + { + "loss": 2.1924, + "grad_norm": 1.7685126066207886, + "learning_rate": 5e-05, + "epoch": 0.5343535890375872, + "step": 8345 + }, + { + "loss": 2.2295, + "grad_norm": 1.6063467264175415, + "learning_rate": 5e-05, + "epoch": 0.5346737529615163, + "step": 8350 + }, + { + "loss": 2.21, + "grad_norm": 1.6099108457565308, + "learning_rate": 5e-05, + "epoch": 0.5349939168854454, + "step": 8355 + }, + { + "loss": 2.2266, + "grad_norm": 1.751085877418518, + "learning_rate": 5e-05, + "epoch": 0.5353140808093744, + "step": 8360 + }, + { + "loss": 2.229, + "grad_norm": 1.7449450492858887, + "learning_rate": 5e-05, + "epoch": 0.5356342447333035, + "step": 8365 + }, + { + "loss": 2.2268, + "grad_norm": 1.6309188604354858, + "learning_rate": 5e-05, + "epoch": 0.5359544086572325, + "step": 8370 + }, + { + "loss": 2.1899, + "grad_norm": 1.632546305656433, + "learning_rate": 5e-05, + "epoch": 0.5362745725811615, + "step": 8375 + }, + { + "loss": 2.2096, + "grad_norm": 1.6440316438674927, + "learning_rate": 5e-05, + "epoch": 0.5365947365050906, + "step": 8380 + }, + { + "loss": 2.2389, + "grad_norm": 1.7283806800842285, + "learning_rate": 5e-05, + "epoch": 0.5369149004290197, + "step": 8385 + }, + { + "loss": 2.2445, + "grad_norm": 1.6345056295394897, + "learning_rate": 5e-05, + "epoch": 0.5372350643529487, + "step": 8390 + }, + { + "loss": 2.1955, + "grad_norm": 1.7141886949539185, + "learning_rate": 5e-05, + "epoch": 0.5375552282768777, + "step": 8395 + }, + { + "loss": 2.2052, + "grad_norm": 1.652571678161621, + "learning_rate": 5e-05, + "epoch": 0.5378753922008068, + "step": 8400 + }, + { + "eval_loss": 2.0771679878234863, + "eval_runtime": 9.1156, + "eval_samples_per_second": 224.67, + "eval_steps_per_second": 28.084, + "epoch": 0.5378753922008068, + "step": 8400 
+ }, + { + "loss": 2.2219, + "grad_norm": 1.7584346532821655, + "learning_rate": 5e-05, + "epoch": 0.5381955561247359, + "step": 8405 + }, + { + "loss": 2.2288, + "grad_norm": 1.6449029445648193, + "learning_rate": 5e-05, + "epoch": 0.5385157200486649, + "step": 8410 + }, + { + "loss": 2.2211, + "grad_norm": 1.7171015739440918, + "learning_rate": 5e-05, + "epoch": 0.538835883972594, + "step": 8415 + }, + { + "loss": 2.2383, + "grad_norm": 1.8249092102050781, + "learning_rate": 5e-05, + "epoch": 0.539156047896523, + "step": 8420 + }, + { + "loss": 2.2288, + "grad_norm": 1.7422677278518677, + "learning_rate": 5e-05, + "epoch": 0.539476211820452, + "step": 8425 + }, + { + "loss": 2.2267, + "grad_norm": 1.7028205394744873, + "learning_rate": 5e-05, + "epoch": 0.5397963757443811, + "step": 8430 + }, + { + "loss": 2.2037, + "grad_norm": 1.6526613235473633, + "learning_rate": 5e-05, + "epoch": 0.5401165396683102, + "step": 8435 + }, + { + "loss": 2.2046, + "grad_norm": 1.6816647052764893, + "learning_rate": 5e-05, + "epoch": 0.5404367035922393, + "step": 8440 + }, + { + "loss": 2.2139, + "grad_norm": 1.7449307441711426, + "learning_rate": 5e-05, + "epoch": 0.5407568675161682, + "step": 8445 + }, + { + "loss": 2.2185, + "grad_norm": 1.6552678346633911, + "learning_rate": 5e-05, + "epoch": 0.5410770314400973, + "step": 8450 + }, + { + "loss": 2.2082, + "grad_norm": 1.6898199319839478, + "learning_rate": 5e-05, + "epoch": 0.5413971953640264, + "step": 8455 + }, + { + "loss": 2.2229, + "grad_norm": 1.6607279777526855, + "learning_rate": 5e-05, + "epoch": 0.5417173592879554, + "step": 8460 + }, + { + "loss": 2.2008, + "grad_norm": 1.650549054145813, + "learning_rate": 5e-05, + "epoch": 0.5420375232118845, + "step": 8465 + }, + { + "loss": 2.1981, + "grad_norm": 1.6187115907669067, + "learning_rate": 5e-05, + "epoch": 0.5423576871358136, + "step": 8470 + }, + { + "loss": 2.1946, + "grad_norm": 1.6210685968399048, + "learning_rate": 5e-05, + "epoch": 0.5426778510597425, + "step": 8475 + }, + { + "loss": 2.1881, + "grad_norm": 1.6523209810256958, + "learning_rate": 5e-05, + "epoch": 0.5429980149836716, + "step": 8480 + }, + { + "loss": 2.1864, + "grad_norm": 1.6581562757492065, + "learning_rate": 5e-05, + "epoch": 0.5433181789076007, + "step": 8485 + }, + { + "loss": 2.223, + "grad_norm": 1.628360629081726, + "learning_rate": 5e-05, + "epoch": 0.5436383428315298, + "step": 8490 + }, + { + "loss": 2.1894, + "grad_norm": 1.696500301361084, + "learning_rate": 5e-05, + "epoch": 0.5439585067554588, + "step": 8495 + }, + { + "loss": 2.2349, + "grad_norm": 1.7601076364517212, + "learning_rate": 5e-05, + "epoch": 0.5442786706793878, + "step": 8500 + }, + { + "loss": 2.2428, + "grad_norm": 1.718100666999817, + "learning_rate": 5e-05, + "epoch": 0.5445988346033169, + "step": 8505 + }, + { + "loss": 2.2134, + "grad_norm": 1.6576154232025146, + "learning_rate": 5e-05, + "epoch": 0.5449189985272459, + "step": 8510 + }, + { + "loss": 2.1908, + "grad_norm": 1.724191665649414, + "learning_rate": 5e-05, + "epoch": 0.545239162451175, + "step": 8515 + }, + { + "loss": 2.2296, + "grad_norm": 1.724888801574707, + "learning_rate": 5e-05, + "epoch": 0.5455593263751041, + "step": 8520 + }, + { + "loss": 2.211, + "grad_norm": 1.753718614578247, + "learning_rate": 5e-05, + "epoch": 0.5458794902990332, + "step": 8525 + }, + { + "loss": 2.1835, + "grad_norm": 1.7066841125488281, + "learning_rate": 5e-05, + "epoch": 0.5461996542229621, + "step": 8530 + }, + { + "loss": 2.2209, + "grad_norm": 1.7294259071350098, + "learning_rate": 
5e-05, + "epoch": 0.5465198181468912, + "step": 8535 + }, + { + "loss": 2.1839, + "grad_norm": 1.827268123626709, + "learning_rate": 5e-05, + "epoch": 0.5468399820708203, + "step": 8540 + }, + { + "loss": 2.2227, + "grad_norm": 1.6551259756088257, + "learning_rate": 5e-05, + "epoch": 0.5471601459947493, + "step": 8545 + }, + { + "loss": 2.2131, + "grad_norm": 1.7133010625839233, + "learning_rate": 5e-05, + "epoch": 0.5474803099186784, + "step": 8550 + }, + { + "loss": 2.2248, + "grad_norm": 1.6199853420257568, + "learning_rate": 5e-05, + "epoch": 0.5478004738426074, + "step": 8555 + }, + { + "loss": 2.1953, + "grad_norm": 1.6595536470413208, + "learning_rate": 5e-05, + "epoch": 0.5481206377665364, + "step": 8560 + }, + { + "loss": 2.2208, + "grad_norm": 1.6121689081192017, + "learning_rate": 5e-05, + "epoch": 0.5484408016904655, + "step": 8565 + }, + { + "loss": 2.2003, + "grad_norm": 1.670493721961975, + "learning_rate": 5e-05, + "epoch": 0.5487609656143946, + "step": 8570 + }, + { + "loss": 2.2055, + "grad_norm": 1.6377112865447998, + "learning_rate": 5e-05, + "epoch": 0.5490811295383237, + "step": 8575 + }, + { + "loss": 2.1795, + "grad_norm": 1.755138635635376, + "learning_rate": 5e-05, + "epoch": 0.5494012934622526, + "step": 8580 + }, + { + "loss": 2.2184, + "grad_norm": 1.746940016746521, + "learning_rate": 5e-05, + "epoch": 0.5497214573861817, + "step": 8585 + }, + { + "loss": 2.1888, + "grad_norm": 1.7026208639144897, + "learning_rate": 5e-05, + "epoch": 0.5500416213101108, + "step": 8590 + }, + { + "loss": 2.1954, + "grad_norm": 1.622263789176941, + "learning_rate": 5e-05, + "epoch": 0.5503617852340398, + "step": 8595 + }, + { + "loss": 2.1978, + "grad_norm": 1.7794569730758667, + "learning_rate": 5e-05, + "epoch": 0.5506819491579689, + "step": 8600 + }, + { + "eval_loss": 2.068195343017578, + "eval_runtime": 8.9579, + "eval_samples_per_second": 228.625, + "eval_steps_per_second": 28.578, + "epoch": 0.5506819491579689, + "step": 8600 + }, + { + "loss": 2.1963, + "grad_norm": 1.67849862575531, + "learning_rate": 5e-05, + "epoch": 0.551002113081898, + "step": 8605 + }, + { + "loss": 2.2099, + "grad_norm": 1.6759015321731567, + "learning_rate": 5e-05, + "epoch": 0.551322277005827, + "step": 8610 + }, + { + "loss": 2.2053, + "grad_norm": 1.6613909006118774, + "learning_rate": 5e-05, + "epoch": 0.551642440929756, + "step": 8615 + }, + { + "loss": 2.2083, + "grad_norm": 1.719401478767395, + "learning_rate": 5e-05, + "epoch": 0.5519626048536851, + "step": 8620 + }, + { + "loss": 2.1983, + "grad_norm": 1.6846554279327393, + "learning_rate": 5e-05, + "epoch": 0.5522827687776142, + "step": 8625 + }, + { + "loss": 2.1777, + "grad_norm": 1.585060715675354, + "learning_rate": 5e-05, + "epoch": 0.5526029327015431, + "step": 8630 + }, + { + "loss": 2.2142, + "grad_norm": 1.5889817476272583, + "learning_rate": 5e-05, + "epoch": 0.5529230966254722, + "step": 8635 + }, + { + "loss": 2.2204, + "grad_norm": 1.8110606670379639, + "learning_rate": 5e-05, + "epoch": 0.5532432605494013, + "step": 8640 + }, + { + "loss": 2.212, + "grad_norm": 1.6412723064422607, + "learning_rate": 5e-05, + "epoch": 0.5535634244733303, + "step": 8645 + }, + { + "loss": 2.2182, + "grad_norm": 1.7718569040298462, + "learning_rate": 5e-05, + "epoch": 0.5538835883972594, + "step": 8650 + }, + { + "loss": 2.193, + "grad_norm": 1.6897401809692383, + "learning_rate": 5e-05, + "epoch": 0.5542037523211885, + "step": 8655 + }, + { + "loss": 2.1971, + "grad_norm": 1.5787633657455444, + "learning_rate": 5e-05, + "epoch": 
0.5545239162451175, + "step": 8660 + }, + { + "loss": 2.1995, + "grad_norm": 1.791604995727539, + "learning_rate": 5e-05, + "epoch": 0.5548440801690465, + "step": 8665 + }, + { + "loss": 2.242, + "grad_norm": 1.6345185041427612, + "learning_rate": 5e-05, + "epoch": 0.5551642440929756, + "step": 8670 + }, + { + "loss": 2.2313, + "grad_norm": 1.6359039545059204, + "learning_rate": 5e-05, + "epoch": 0.5554844080169047, + "step": 8675 + }, + { + "loss": 2.2167, + "grad_norm": 1.5969913005828857, + "learning_rate": 5e-05, + "epoch": 0.5558045719408337, + "step": 8680 + }, + { + "loss": 2.1846, + "grad_norm": 1.6926162242889404, + "learning_rate": 5e-05, + "epoch": 0.5561247358647627, + "step": 8685 + }, + { + "loss": 2.2072, + "grad_norm": 1.5990930795669556, + "learning_rate": 5e-05, + "epoch": 0.5564448997886918, + "step": 8690 + }, + { + "loss": 2.2057, + "grad_norm": 1.6216379404067993, + "learning_rate": 5e-05, + "epoch": 0.5567650637126209, + "step": 8695 + }, + { + "loss": 2.1902, + "grad_norm": 1.7529386281967163, + "learning_rate": 5e-05, + "epoch": 0.5570852276365499, + "step": 8700 + }, + { + "loss": 2.2107, + "grad_norm": 1.7439981698989868, + "learning_rate": 5e-05, + "epoch": 0.557405391560479, + "step": 8705 + }, + { + "loss": 2.2051, + "grad_norm": 1.6986141204833984, + "learning_rate": 5e-05, + "epoch": 0.557725555484408, + "step": 8710 + }, + { + "loss": 2.2122, + "grad_norm": 1.678336501121521, + "learning_rate": 5e-05, + "epoch": 0.558045719408337, + "step": 8715 + }, + { + "loss": 2.2034, + "grad_norm": 1.6236997842788696, + "learning_rate": 5e-05, + "epoch": 0.5583658833322661, + "step": 8720 + }, + { + "loss": 2.2108, + "grad_norm": 1.568988561630249, + "learning_rate": 5e-05, + "epoch": 0.5586860472561952, + "step": 8725 + }, + { + "loss": 2.1891, + "grad_norm": 1.6444505453109741, + "learning_rate": 5e-05, + "epoch": 0.5590062111801242, + "step": 8730 + }, + { + "loss": 2.2248, + "grad_norm": 1.6724077463150024, + "learning_rate": 5e-05, + "epoch": 0.5593263751040533, + "step": 8735 + }, + { + "loss": 2.214, + "grad_norm": 1.6417819261550903, + "learning_rate": 5e-05, + "epoch": 0.5596465390279823, + "step": 8740 + }, + { + "loss": 2.1809, + "grad_norm": 1.6727244853973389, + "learning_rate": 5e-05, + "epoch": 0.5599667029519114, + "step": 8745 + }, + { + "loss": 2.2277, + "grad_norm": 1.6233677864074707, + "learning_rate": 5e-05, + "epoch": 0.5602868668758404, + "step": 8750 + }, + { + "loss": 2.2074, + "grad_norm": 1.6876188516616821, + "learning_rate": 5e-05, + "epoch": 0.5606070307997695, + "step": 8755 + }, + { + "loss": 2.2446, + "grad_norm": 1.6457571983337402, + "learning_rate": 5e-05, + "epoch": 0.5609271947236986, + "step": 8760 + }, + { + "loss": 2.2219, + "grad_norm": 1.6713467836380005, + "learning_rate": 5e-05, + "epoch": 0.5612473586476275, + "step": 8765 + }, + { + "loss": 2.1961, + "grad_norm": 1.6506388187408447, + "learning_rate": 5e-05, + "epoch": 0.5615675225715566, + "step": 8770 + }, + { + "loss": 2.2156, + "grad_norm": 1.7466049194335938, + "learning_rate": 5e-05, + "epoch": 0.5618876864954857, + "step": 8775 + }, + { + "loss": 2.2071, + "grad_norm": 1.702660322189331, + "learning_rate": 5e-05, + "epoch": 0.5622078504194148, + "step": 8780 + }, + { + "loss": 2.1891, + "grad_norm": 1.733842134475708, + "learning_rate": 5e-05, + "epoch": 0.5625280143433438, + "step": 8785 + }, + { + "loss": 2.1982, + "grad_norm": 1.6536738872528076, + "learning_rate": 5e-05, + "epoch": 0.5628481782672728, + "step": 8790 + }, + { + "loss": 2.2137, + "grad_norm": 
1.6220111846923828, + "learning_rate": 5e-05, + "epoch": 0.5631683421912019, + "step": 8795 + }, + { + "loss": 2.2096, + "grad_norm": 1.7264735698699951, + "learning_rate": 5e-05, + "epoch": 0.5634885061151309, + "step": 8800 + }, + { + "eval_loss": 2.054811477661133, + "eval_runtime": 9.6124, + "eval_samples_per_second": 213.059, + "eval_steps_per_second": 26.632, + "epoch": 0.5634885061151309, + "step": 8800 + }, + { + "loss": 2.207, + "grad_norm": 1.7159490585327148, + "learning_rate": 5e-05, + "epoch": 0.56380867003906, + "step": 8805 + }, + { + "loss": 2.2107, + "grad_norm": 1.6418389081954956, + "learning_rate": 5e-05, + "epoch": 0.5641288339629891, + "step": 8810 + }, + { + "loss": 2.2117, + "grad_norm": 1.6357439756393433, + "learning_rate": 5e-05, + "epoch": 0.564448997886918, + "step": 8815 + }, + { + "loss": 2.2005, + "grad_norm": 1.6727110147476196, + "learning_rate": 5e-05, + "epoch": 0.5647691618108471, + "step": 8820 + }, + { + "loss": 2.209, + "grad_norm": 1.7824372053146362, + "learning_rate": 5e-05, + "epoch": 0.5650893257347762, + "step": 8825 + }, + { + "loss": 2.2069, + "grad_norm": 1.6073055267333984, + "learning_rate": 5e-05, + "epoch": 0.5654094896587053, + "step": 8830 + }, + { + "loss": 2.2006, + "grad_norm": 1.6197354793548584, + "learning_rate": 5e-05, + "epoch": 0.5657296535826343, + "step": 8835 + }, + { + "loss": 2.187, + "grad_norm": 1.6128679513931274, + "learning_rate": 5e-05, + "epoch": 0.5660498175065634, + "step": 8840 + }, + { + "loss": 2.186, + "grad_norm": 1.6642301082611084, + "learning_rate": 5e-05, + "epoch": 0.5663699814304924, + "step": 8845 + }, + { + "loss": 2.2474, + "grad_norm": 1.6685996055603027, + "learning_rate": 5e-05, + "epoch": 0.5666901453544214, + "step": 8850 + }, + { + "loss": 2.2148, + "grad_norm": 1.7215569019317627, + "learning_rate": 5e-05, + "epoch": 0.5670103092783505, + "step": 8855 + }, + { + "loss": 2.2341, + "grad_norm": 1.8265643119812012, + "learning_rate": 5e-05, + "epoch": 0.5673304732022796, + "step": 8860 + }, + { + "loss": 2.1853, + "grad_norm": 1.7127398252487183, + "learning_rate": 5e-05, + "epoch": 0.5676506371262087, + "step": 8865 + }, + { + "loss": 2.1994, + "grad_norm": 1.6666933298110962, + "learning_rate": 5e-05, + "epoch": 0.5679708010501376, + "step": 8870 + }, + { + "loss": 2.1971, + "grad_norm": 1.6345758438110352, + "learning_rate": 5e-05, + "epoch": 0.5682909649740667, + "step": 8875 + }, + { + "loss": 2.1836, + "grad_norm": 1.6167995929718018, + "learning_rate": 5e-05, + "epoch": 0.5686111288979958, + "step": 8880 + }, + { + "loss": 2.1954, + "grad_norm": 1.57766854763031, + "learning_rate": 5e-05, + "epoch": 0.5689312928219248, + "step": 8885 + }, + { + "loss": 2.2106, + "grad_norm": 1.6215426921844482, + "learning_rate": 5e-05, + "epoch": 0.5692514567458539, + "step": 8890 + }, + { + "loss": 2.1891, + "grad_norm": 1.6707900762557983, + "learning_rate": 5e-05, + "epoch": 0.569571620669783, + "step": 8895 + }, + { + "loss": 2.1936, + "grad_norm": 1.6593118906021118, + "learning_rate": 5e-05, + "epoch": 0.5698917845937119, + "step": 8900 + }, + { + "loss": 2.2129, + "grad_norm": 1.666658639907837, + "learning_rate": 5e-05, + "epoch": 0.570211948517641, + "step": 8905 + }, + { + "loss": 2.193, + "grad_norm": 1.680935025215149, + "learning_rate": 5e-05, + "epoch": 0.5705321124415701, + "step": 8910 + }, + { + "loss": 2.2007, + "grad_norm": 1.6215225458145142, + "learning_rate": 5e-05, + "epoch": 0.5708522763654992, + "step": 8915 + }, + { + "loss": 2.2161, + "grad_norm": 1.716860294342041, + 
"learning_rate": 5e-05, + "epoch": 0.5711724402894282, + "step": 8920 + }, + { + "loss": 2.211, + "grad_norm": 1.6454654932022095, + "learning_rate": 5e-05, + "epoch": 0.5714926042133572, + "step": 8925 + }, + { + "loss": 2.1803, + "grad_norm": 1.6381767988204956, + "learning_rate": 5e-05, + "epoch": 0.5718127681372863, + "step": 8930 + }, + { + "loss": 2.2293, + "grad_norm": 1.6502015590667725, + "learning_rate": 5e-05, + "epoch": 0.5721329320612153, + "step": 8935 + }, + { + "loss": 2.2001, + "grad_norm": 1.691019892692566, + "learning_rate": 5e-05, + "epoch": 0.5724530959851444, + "step": 8940 + }, + { + "loss": 2.1875, + "grad_norm": 1.6835181713104248, + "learning_rate": 5e-05, + "epoch": 0.5727732599090735, + "step": 8945 + }, + { + "loss": 2.2026, + "grad_norm": 1.6180915832519531, + "learning_rate": 5e-05, + "epoch": 0.5730934238330025, + "step": 8950 + }, + { + "loss": 2.2328, + "grad_norm": 1.57301926612854, + "learning_rate": 5e-05, + "epoch": 0.5734135877569315, + "step": 8955 + }, + { + "loss": 2.2343, + "grad_norm": 1.712069034576416, + "learning_rate": 5e-05, + "epoch": 0.5737337516808606, + "step": 8960 + }, + { + "loss": 2.189, + "grad_norm": 1.712024211883545, + "learning_rate": 5e-05, + "epoch": 0.5740539156047897, + "step": 8965 + }, + { + "loss": 2.1816, + "grad_norm": 1.7028721570968628, + "learning_rate": 5e-05, + "epoch": 0.5743740795287187, + "step": 8970 + }, + { + "loss": 2.1809, + "grad_norm": 1.7358742952346802, + "learning_rate": 5e-05, + "epoch": 0.5746942434526477, + "step": 8975 + }, + { + "loss": 2.2039, + "grad_norm": 1.7282836437225342, + "learning_rate": 5e-05, + "epoch": 0.5750144073765768, + "step": 8980 + }, + { + "loss": 2.2086, + "grad_norm": 1.6507220268249512, + "learning_rate": 5e-05, + "epoch": 0.5753345713005058, + "step": 8985 + }, + { + "loss": 2.1958, + "grad_norm": 1.7284539937973022, + "learning_rate": 5e-05, + "epoch": 0.5756547352244349, + "step": 8990 + }, + { + "loss": 2.1792, + "grad_norm": 1.5841361284255981, + "learning_rate": 5e-05, + "epoch": 0.575974899148364, + "step": 8995 + }, + { + "loss": 2.1905, + "grad_norm": 1.7276769876480103, + "learning_rate": 5e-05, + "epoch": 0.5762950630722931, + "step": 9000 + }, + { + "eval_loss": 2.0714640617370605, + "eval_runtime": 9.0714, + "eval_samples_per_second": 225.763, + "eval_steps_per_second": 28.22, + "epoch": 0.5762950630722931, + "step": 9000 + }, + { + "loss": 2.217, + "grad_norm": 1.657918095588684, + "learning_rate": 5e-05, + "epoch": 0.576615226996222, + "step": 9005 + }, + { + "loss": 2.2101, + "grad_norm": 1.6392052173614502, + "learning_rate": 5e-05, + "epoch": 0.5769353909201511, + "step": 9010 + }, + { + "loss": 2.1809, + "grad_norm": 1.600339412689209, + "learning_rate": 5e-05, + "epoch": 0.5772555548440802, + "step": 9015 + }, + { + "loss": 2.1968, + "grad_norm": 1.6771150827407837, + "learning_rate": 5e-05, + "epoch": 0.5775757187680092, + "step": 9020 + }, + { + "loss": 2.2139, + "grad_norm": 1.615399718284607, + "learning_rate": 5e-05, + "epoch": 0.5778958826919383, + "step": 9025 + }, + { + "loss": 2.1459, + "grad_norm": 1.6415457725524902, + "learning_rate": 5e-05, + "epoch": 0.5782160466158673, + "step": 9030 + }, + { + "loss": 2.2141, + "grad_norm": 1.626848816871643, + "learning_rate": 5e-05, + "epoch": 0.5785362105397964, + "step": 9035 + }, + { + "loss": 2.1816, + "grad_norm": 1.6501692533493042, + "learning_rate": 5e-05, + "epoch": 0.5788563744637254, + "step": 9040 + }, + { + "loss": 2.2044, + "grad_norm": 1.6761474609375, + "learning_rate": 5e-05, + 
"epoch": 0.5791765383876545, + "step": 9045 + }, + { + "loss": 2.2251, + "grad_norm": 1.6290276050567627, + "learning_rate": 5e-05, + "epoch": 0.5794967023115836, + "step": 9050 + }, + { + "loss": 2.1981, + "grad_norm": 1.680826187133789, + "learning_rate": 5e-05, + "epoch": 0.5798168662355125, + "step": 9055 + }, + { + "loss": 2.2105, + "grad_norm": 1.649733066558838, + "learning_rate": 5e-05, + "epoch": 0.5801370301594416, + "step": 9060 + }, + { + "loss": 2.1951, + "grad_norm": 1.550475001335144, + "learning_rate": 5e-05, + "epoch": 0.5804571940833707, + "step": 9065 + }, + { + "loss": 2.1964, + "grad_norm": 1.6704422235488892, + "learning_rate": 5e-05, + "epoch": 0.5807773580072997, + "step": 9070 + }, + { + "loss": 2.1977, + "grad_norm": 1.5833699703216553, + "learning_rate": 5e-05, + "epoch": 0.5810975219312288, + "step": 9075 + }, + { + "loss": 2.2158, + "grad_norm": 1.6299668550491333, + "learning_rate": 5e-05, + "epoch": 0.5814176858551579, + "step": 9080 + }, + { + "loss": 2.1806, + "grad_norm": 1.6842360496520996, + "learning_rate": 5e-05, + "epoch": 0.5817378497790869, + "step": 9085 + }, + { + "loss": 2.2131, + "grad_norm": 1.6969107389450073, + "learning_rate": 5e-05, + "epoch": 0.5820580137030159, + "step": 9090 + }, + { + "loss": 2.2133, + "grad_norm": 1.7344791889190674, + "learning_rate": 5e-05, + "epoch": 0.582378177626945, + "step": 9095 + }, + { + "loss": 2.2099, + "grad_norm": 1.6542928218841553, + "learning_rate": 5e-05, + "epoch": 0.5826983415508741, + "step": 9100 + }, + { + "loss": 2.1778, + "grad_norm": 1.6566765308380127, + "learning_rate": 5e-05, + "epoch": 0.583018505474803, + "step": 9105 + }, + { + "loss": 2.1857, + "grad_norm": 1.6014552116394043, + "learning_rate": 5e-05, + "epoch": 0.5833386693987321, + "step": 9110 + }, + { + "loss": 2.2152, + "grad_norm": 1.6418790817260742, + "learning_rate": 5e-05, + "epoch": 0.5836588333226612, + "step": 9115 + }, + { + "loss": 2.1682, + "grad_norm": 1.601062536239624, + "learning_rate": 5e-05, + "epoch": 0.5839789972465903, + "step": 9120 + }, + { + "loss": 2.179, + "grad_norm": 1.721508502960205, + "learning_rate": 5e-05, + "epoch": 0.5842991611705193, + "step": 9125 + }, + { + "loss": 2.1789, + "grad_norm": 1.6199660301208496, + "learning_rate": 5e-05, + "epoch": 0.5846193250944484, + "step": 9130 + }, + { + "loss": 2.2258, + "grad_norm": 1.7283803224563599, + "learning_rate": 5e-05, + "epoch": 0.5849394890183774, + "step": 9135 + }, + { + "loss": 2.1966, + "grad_norm": 1.761349081993103, + "learning_rate": 5e-05, + "epoch": 0.5852596529423064, + "step": 9140 + }, + { + "loss": 2.1639, + "grad_norm": 1.6975593566894531, + "learning_rate": 5e-05, + "epoch": 0.5855798168662355, + "step": 9145 + }, + { + "loss": 2.1935, + "grad_norm": 1.672235369682312, + "learning_rate": 5e-05, + "epoch": 0.5858999807901646, + "step": 9150 + }, + { + "loss": 2.2, + "grad_norm": 1.6695904731750488, + "learning_rate": 5e-05, + "epoch": 0.5862201447140936, + "step": 9155 + }, + { + "loss": 2.2062, + "grad_norm": 1.6303057670593262, + "learning_rate": 5e-05, + "epoch": 0.5865403086380226, + "step": 9160 + }, + { + "loss": 2.2062, + "grad_norm": 1.704704761505127, + "learning_rate": 5e-05, + "epoch": 0.5868604725619517, + "step": 9165 + }, + { + "loss": 2.183, + "grad_norm": 1.5838806629180908, + "learning_rate": 5e-05, + "epoch": 0.5871806364858808, + "step": 9170 + }, + { + "loss": 2.2028, + "grad_norm": 1.6641960144042969, + "learning_rate": 5e-05, + "epoch": 0.5875008004098098, + "step": 9175 + }, + { + "loss": 2.2251, + "grad_norm": 
1.6648904085159302, + "learning_rate": 5e-05, + "epoch": 0.5878209643337389, + "step": 9180 + }, + { + "loss": 2.2126, + "grad_norm": 1.7670952081680298, + "learning_rate": 5e-05, + "epoch": 0.588141128257668, + "step": 9185 + }, + { + "loss": 2.2139, + "grad_norm": 1.7805712223052979, + "learning_rate": 5e-05, + "epoch": 0.5884612921815969, + "step": 9190 + }, + { + "loss": 2.2055, + "grad_norm": 1.7651017904281616, + "learning_rate": 5e-05, + "epoch": 0.588781456105526, + "step": 9195 + }, + { + "loss": 2.1971, + "grad_norm": 1.8590534925460815, + "learning_rate": 5e-05, + "epoch": 0.5891016200294551, + "step": 9200 + }, + { + "eval_loss": 2.0600976943969727, + "eval_runtime": 9.0846, + "eval_samples_per_second": 225.436, + "eval_steps_per_second": 28.18, + "epoch": 0.5891016200294551, + "step": 9200 + }, + { + "loss": 2.1996, + "grad_norm": 1.6831485033035278, + "learning_rate": 5e-05, + "epoch": 0.5894217839533842, + "step": 9205 + }, + { + "loss": 2.1846, + "grad_norm": 1.6748493909835815, + "learning_rate": 5e-05, + "epoch": 0.5897419478773132, + "step": 9210 + }, + { + "loss": 2.1819, + "grad_norm": 1.6625423431396484, + "learning_rate": 5e-05, + "epoch": 0.5900621118012422, + "step": 9215 + }, + { + "loss": 2.2139, + "grad_norm": 1.6799538135528564, + "learning_rate": 5e-05, + "epoch": 0.5903822757251713, + "step": 9220 + }, + { + "loss": 2.2051, + "grad_norm": 1.723824143409729, + "learning_rate": 5e-05, + "epoch": 0.5907024396491003, + "step": 9225 + }, + { + "loss": 2.1924, + "grad_norm": 1.634695291519165, + "learning_rate": 5e-05, + "epoch": 0.5910226035730294, + "step": 9230 + }, + { + "loss": 2.1907, + "grad_norm": 1.6800014972686768, + "learning_rate": 5e-05, + "epoch": 0.5913427674969585, + "step": 9235 + }, + { + "loss": 2.1846, + "grad_norm": 1.5842480659484863, + "learning_rate": 5e-05, + "epoch": 0.5916629314208874, + "step": 9240 + }, + { + "loss": 2.1945, + "grad_norm": 1.6501984596252441, + "learning_rate": 5e-05, + "epoch": 0.5919830953448165, + "step": 9245 + }, + { + "loss": 2.2027, + "grad_norm": 1.6772314310073853, + "learning_rate": 5e-05, + "epoch": 0.5923032592687456, + "step": 9250 + }, + { + "loss": 2.1834, + "grad_norm": 1.6418979167938232, + "learning_rate": 5e-05, + "epoch": 0.5926234231926747, + "step": 9255 + }, + { + "loss": 2.1975, + "grad_norm": 1.6140997409820557, + "learning_rate": 5e-05, + "epoch": 0.5929435871166037, + "step": 9260 + }, + { + "loss": 2.2261, + "grad_norm": 1.6440484523773193, + "learning_rate": 5e-05, + "epoch": 0.5932637510405327, + "step": 9265 + }, + { + "loss": 2.1802, + "grad_norm": 1.642822027206421, + "learning_rate": 5e-05, + "epoch": 0.5935839149644618, + "step": 9270 + }, + { + "loss": 2.1736, + "grad_norm": 1.622124433517456, + "learning_rate": 5e-05, + "epoch": 0.5939040788883908, + "step": 9275 + }, + { + "loss": 2.1894, + "grad_norm": 1.6651124954223633, + "learning_rate": 5e-05, + "epoch": 0.5942242428123199, + "step": 9280 + }, + { + "loss": 2.1898, + "grad_norm": 1.6245893239974976, + "learning_rate": 5e-05, + "epoch": 0.594544406736249, + "step": 9285 + }, + { + "loss": 2.1863, + "grad_norm": 1.5905382633209229, + "learning_rate": 5e-05, + "epoch": 0.5948645706601781, + "step": 9290 + }, + { + "loss": 2.1795, + "grad_norm": 1.688947319984436, + "learning_rate": 5e-05, + "epoch": 0.595184734584107, + "step": 9295 + }, + { + "loss": 2.2024, + "grad_norm": 1.6358672380447388, + "learning_rate": 5e-05, + "epoch": 0.5955048985080361, + "step": 9300 + }, + { + "loss": 2.1899, + "grad_norm": 1.6548181772232056, + 
"learning_rate": 5e-05, + "epoch": 0.5958250624319652, + "step": 9305 + }, + { + "loss": 2.1889, + "grad_norm": 1.612070918083191, + "learning_rate": 5e-05, + "epoch": 0.5961452263558942, + "step": 9310 + }, + { + "loss": 2.2026, + "grad_norm": 1.6708488464355469, + "learning_rate": 5e-05, + "epoch": 0.5964653902798233, + "step": 9315 + }, + { + "loss": 2.1895, + "grad_norm": 1.5530942678451538, + "learning_rate": 5e-05, + "epoch": 0.5967855542037523, + "step": 9320 + }, + { + "loss": 2.2076, + "grad_norm": 1.713191270828247, + "learning_rate": 5e-05, + "epoch": 0.5971057181276813, + "step": 9325 + }, + { + "loss": 2.2008, + "grad_norm": 1.6353609561920166, + "learning_rate": 5e-05, + "epoch": 0.5974258820516104, + "step": 9330 + }, + { + "loss": 2.1978, + "grad_norm": 1.6699703931808472, + "learning_rate": 5e-05, + "epoch": 0.5977460459755395, + "step": 9335 + }, + { + "loss": 2.2398, + "grad_norm": 1.6570907831192017, + "learning_rate": 5e-05, + "epoch": 0.5980662098994686, + "step": 9340 + }, + { + "loss": 2.1825, + "grad_norm": 1.5736383199691772, + "learning_rate": 5e-05, + "epoch": 0.5983863738233975, + "step": 9345 + }, + { + "loss": 2.1911, + "grad_norm": 1.6274065971374512, + "learning_rate": 5e-05, + "epoch": 0.5987065377473266, + "step": 9350 + }, + { + "loss": 2.2171, + "grad_norm": 1.6303297281265259, + "learning_rate": 5e-05, + "epoch": 0.5990267016712557, + "step": 9355 + }, + { + "loss": 2.2006, + "grad_norm": 1.5498477220535278, + "learning_rate": 5e-05, + "epoch": 0.5993468655951847, + "step": 9360 + }, + { + "loss": 2.2076, + "grad_norm": 1.6340336799621582, + "learning_rate": 5e-05, + "epoch": 0.5996670295191138, + "step": 9365 + }, + { + "loss": 2.197, + "grad_norm": 1.693532943725586, + "learning_rate": 5e-05, + "epoch": 0.5999871934430429, + "step": 9370 + }, + { + "loss": 2.1982, + "grad_norm": 1.7246029376983643, + "learning_rate": 5e-05, + "epoch": 0.6003073573669719, + "step": 9375 + }, + { + "loss": 2.1855, + "grad_norm": 1.6022217273712158, + "learning_rate": 5e-05, + "epoch": 0.6006275212909009, + "step": 9380 + }, + { + "loss": 2.1501, + "grad_norm": 1.6127444505691528, + "learning_rate": 5e-05, + "epoch": 0.60094768521483, + "step": 9385 + }, + { + "loss": 2.1776, + "grad_norm": 1.6102303266525269, + "learning_rate": 5e-05, + "epoch": 0.6012678491387591, + "step": 9390 + }, + { + "loss": 2.1867, + "grad_norm": 1.6318905353546143, + "learning_rate": 5e-05, + "epoch": 0.601588013062688, + "step": 9395 + }, + { + "loss": 2.1573, + "grad_norm": 1.6750482320785522, + "learning_rate": 5e-05, + "epoch": 0.6019081769866171, + "step": 9400 + }, + { + "eval_loss": 2.0679101943969727, + "eval_runtime": 9.4704, + "eval_samples_per_second": 216.253, + "eval_steps_per_second": 27.032, + "epoch": 0.6019081769866171, + "step": 9400 + }, + { + "loss": 2.1869, + "grad_norm": 1.6901905536651611, + "learning_rate": 5e-05, + "epoch": 0.6022283409105462, + "step": 9405 + }, + { + "loss": 2.1978, + "grad_norm": 1.6613404750823975, + "learning_rate": 5e-05, + "epoch": 0.6025485048344752, + "step": 9410 + }, + { + "loss": 2.2079, + "grad_norm": 1.7926342487335205, + "learning_rate": 5e-05, + "epoch": 0.6028686687584043, + "step": 9415 + }, + { + "loss": 2.2059, + "grad_norm": 1.624189019203186, + "learning_rate": 5e-05, + "epoch": 0.6031888326823334, + "step": 9420 + }, + { + "loss": 2.1925, + "grad_norm": 1.628775715827942, + "learning_rate": 5e-05, + "epoch": 0.6035089966062624, + "step": 9425 + }, + { + "loss": 2.199, + "grad_norm": 1.6382794380187988, + "learning_rate": 5e-05, + 
"epoch": 0.6038291605301914, + "step": 9430 + }, + { + "loss": 2.174, + "grad_norm": 1.6540191173553467, + "learning_rate": 5e-05, + "epoch": 0.6041493244541205, + "step": 9435 + }, + { + "loss": 2.2111, + "grad_norm": 1.689916729927063, + "learning_rate": 5e-05, + "epoch": 0.6044694883780496, + "step": 9440 + }, + { + "loss": 2.163, + "grad_norm": 1.7421029806137085, + "learning_rate": 5e-05, + "epoch": 0.6047896523019786, + "step": 9445 + }, + { + "loss": 2.1926, + "grad_norm": 1.6341474056243896, + "learning_rate": 5e-05, + "epoch": 0.6051098162259076, + "step": 9450 + }, + { + "loss": 2.1649, + "grad_norm": 1.5881348848342896, + "learning_rate": 5e-05, + "epoch": 0.6054299801498367, + "step": 9455 + }, + { + "loss": 2.199, + "grad_norm": 1.727407693862915, + "learning_rate": 5e-05, + "epoch": 0.6057501440737658, + "step": 9460 + }, + { + "loss": 2.1982, + "grad_norm": 1.7314342260360718, + "learning_rate": 5e-05, + "epoch": 0.6060703079976948, + "step": 9465 + }, + { + "loss": 2.1971, + "grad_norm": 1.656754732131958, + "learning_rate": 5e-05, + "epoch": 0.6063904719216239, + "step": 9470 + }, + { + "loss": 2.1756, + "grad_norm": 1.6005347967147827, + "learning_rate": 5e-05, + "epoch": 0.606710635845553, + "step": 9475 + }, + { + "loss": 2.1881, + "grad_norm": 1.5883443355560303, + "learning_rate": 5e-05, + "epoch": 0.6070307997694819, + "step": 9480 + }, + { + "loss": 2.1901, + "grad_norm": 1.8181809186935425, + "learning_rate": 5e-05, + "epoch": 0.607350963693411, + "step": 9485 + }, + { + "loss": 2.1893, + "grad_norm": 1.8270716667175293, + "learning_rate": 5e-05, + "epoch": 0.6076711276173401, + "step": 9490 + }, + { + "loss": 2.2014, + "grad_norm": 1.7661374807357788, + "learning_rate": 5e-05, + "epoch": 0.6079912915412691, + "step": 9495 + }, + { + "loss": 2.2077, + "grad_norm": 1.6404733657836914, + "learning_rate": 5e-05, + "epoch": 0.6083114554651982, + "step": 9500 + }, + { + "loss": 2.1991, + "grad_norm": 1.6874150037765503, + "learning_rate": 5e-05, + "epoch": 0.6086316193891272, + "step": 9505 + }, + { + "loss": 2.212, + "grad_norm": 1.7771927118301392, + "learning_rate": 5e-05, + "epoch": 0.6089517833130563, + "step": 9510 + }, + { + "loss": 2.1921, + "grad_norm": 1.7601332664489746, + "learning_rate": 5e-05, + "epoch": 0.6092719472369853, + "step": 9515 + }, + { + "loss": 2.1887, + "grad_norm": 1.7171956300735474, + "learning_rate": 5e-05, + "epoch": 0.6095921111609144, + "step": 9520 + }, + { + "loss": 2.2107, + "grad_norm": 1.5932697057724, + "learning_rate": 5e-05, + "epoch": 0.6099122750848435, + "step": 9525 + }, + { + "loss": 2.1952, + "grad_norm": 1.6156558990478516, + "learning_rate": 5e-05, + "epoch": 0.6102324390087724, + "step": 9530 + }, + { + "loss": 2.2223, + "grad_norm": 1.643193244934082, + "learning_rate": 5e-05, + "epoch": 0.6105526029327015, + "step": 9535 + }, + { + "loss": 2.1795, + "grad_norm": 1.7439320087432861, + "learning_rate": 5e-05, + "epoch": 0.6108727668566306, + "step": 9540 + }, + { + "loss": 2.2021, + "grad_norm": 1.6661970615386963, + "learning_rate": 5e-05, + "epoch": 0.6111929307805597, + "step": 9545 + }, + { + "loss": 2.1948, + "grad_norm": 1.6624956130981445, + "learning_rate": 5e-05, + "epoch": 0.6115130947044887, + "step": 9550 + }, + { + "loss": 2.191, + "grad_norm": 1.6625850200653076, + "learning_rate": 5e-05, + "epoch": 0.6118332586284178, + "step": 9555 + }, + { + "loss": 2.1988, + "grad_norm": 1.6776957511901855, + "learning_rate": 5e-05, + "epoch": 0.6121534225523468, + "step": 9560 + }, + { + "loss": 2.2195, + 
"grad_norm": 1.6892213821411133, + "learning_rate": 5e-05, + "epoch": 0.6124735864762758, + "step": 9565 + }, + { + "loss": 2.2082, + "grad_norm": 1.7207622528076172, + "learning_rate": 5e-05, + "epoch": 0.6127937504002049, + "step": 9570 + }, + { + "loss": 2.1844, + "grad_norm": 1.6284791231155396, + "learning_rate": 5e-05, + "epoch": 0.613113914324134, + "step": 9575 + }, + { + "loss": 2.199, + "grad_norm": 1.6319007873535156, + "learning_rate": 5e-05, + "epoch": 0.613434078248063, + "step": 9580 + }, + { + "loss": 2.1955, + "grad_norm": 1.634732723236084, + "learning_rate": 5e-05, + "epoch": 0.613754242171992, + "step": 9585 + }, + { + "loss": 2.1924, + "grad_norm": 1.7350897789001465, + "learning_rate": 5e-05, + "epoch": 0.6140744060959211, + "step": 9590 + }, + { + "loss": 2.1722, + "grad_norm": 1.6487643718719482, + "learning_rate": 5e-05, + "epoch": 0.6143945700198502, + "step": 9595 + }, + { + "loss": 2.1732, + "grad_norm": 1.678441047668457, + "learning_rate": 5e-05, + "epoch": 0.6147147339437792, + "step": 9600 + }, + { + "eval_loss": 2.0491390228271484, + "eval_runtime": 10.1854, + "eval_samples_per_second": 201.071, + "eval_steps_per_second": 25.134, + "epoch": 0.6147147339437792, + "step": 9600 + }, + { + "loss": 2.2301, + "grad_norm": 1.7390731573104858, + "learning_rate": 5e-05, + "epoch": 0.6150348978677083, + "step": 9605 + }, + { + "loss": 2.2033, + "grad_norm": 1.70026433467865, + "learning_rate": 5e-05, + "epoch": 0.6153550617916373, + "step": 9610 + }, + { + "loss": 2.2143, + "grad_norm": 1.6489602327346802, + "learning_rate": 5e-05, + "epoch": 0.6156752257155663, + "step": 9615 + }, + { + "loss": 2.1629, + "grad_norm": 1.707454800605774, + "learning_rate": 5e-05, + "epoch": 0.6159953896394954, + "step": 9620 + }, + { + "loss": 2.1867, + "grad_norm": 1.7108656167984009, + "learning_rate": 5e-05, + "epoch": 0.6163155535634245, + "step": 9625 + }, + { + "loss": 2.1792, + "grad_norm": 1.6427521705627441, + "learning_rate": 5e-05, + "epoch": 0.6166357174873536, + "step": 9630 + }, + { + "loss": 2.193, + "grad_norm": 1.6153441667556763, + "learning_rate": 5e-05, + "epoch": 0.6169558814112825, + "step": 9635 + }, + { + "loss": 2.1779, + "grad_norm": 1.5934603214263916, + "learning_rate": 5e-05, + "epoch": 0.6172760453352116, + "step": 9640 + }, + { + "loss": 2.1825, + "grad_norm": 1.7031588554382324, + "learning_rate": 5e-05, + "epoch": 0.6175962092591407, + "step": 9645 + }, + { + "loss": 2.1919, + "grad_norm": 1.7012118101119995, + "learning_rate": 5e-05, + "epoch": 0.6179163731830697, + "step": 9650 + }, + { + "loss": 2.1638, + "grad_norm": 1.695015549659729, + "learning_rate": 5e-05, + "epoch": 0.6182365371069988, + "step": 9655 + }, + { + "loss": 2.191, + "grad_norm": 1.6796445846557617, + "learning_rate": 5e-05, + "epoch": 0.6185567010309279, + "step": 9660 + }, + { + "loss": 2.2073, + "grad_norm": 1.71908700466156, + "learning_rate": 5e-05, + "epoch": 0.6188768649548568, + "step": 9665 + }, + { + "loss": 2.222, + "grad_norm": 1.6226277351379395, + "learning_rate": 5e-05, + "epoch": 0.6191970288787859, + "step": 9670 + }, + { + "loss": 2.1851, + "grad_norm": 1.6331515312194824, + "learning_rate": 5e-05, + "epoch": 0.619517192802715, + "step": 9675 + }, + { + "loss": 2.2073, + "grad_norm": 1.7072153091430664, + "learning_rate": 5e-05, + "epoch": 0.6198373567266441, + "step": 9680 + }, + { + "loss": 2.1987, + "grad_norm": 1.7724852561950684, + "learning_rate": 5e-05, + "epoch": 0.6201575206505731, + "step": 9685 + }, + { + "loss": 2.2316, + "grad_norm": 
1.5883903503417969, + "learning_rate": 5e-05, + "epoch": 0.6204776845745021, + "step": 9690 + }, + { + "loss": 2.1984, + "grad_norm": 1.6855353116989136, + "learning_rate": 5e-05, + "epoch": 0.6207978484984312, + "step": 9695 + }, + { + "loss": 2.1934, + "grad_norm": 1.6740260124206543, + "learning_rate": 5e-05, + "epoch": 0.6211180124223602, + "step": 9700 + }, + { + "loss": 2.1905, + "grad_norm": 1.5806589126586914, + "learning_rate": 5e-05, + "epoch": 0.6214381763462893, + "step": 9705 + }, + { + "loss": 2.1811, + "grad_norm": 1.7320712804794312, + "learning_rate": 5e-05, + "epoch": 0.6217583402702184, + "step": 9710 + }, + { + "loss": 2.1561, + "grad_norm": 1.6316450834274292, + "learning_rate": 5e-05, + "epoch": 0.6220785041941475, + "step": 9715 + }, + { + "loss": 2.1887, + "grad_norm": 1.693595290184021, + "learning_rate": 5e-05, + "epoch": 0.6223986681180764, + "step": 9720 + }, + { + "loss": 2.1818, + "grad_norm": 1.6201201677322388, + "learning_rate": 5e-05, + "epoch": 0.6227188320420055, + "step": 9725 + }, + { + "loss": 2.1803, + "grad_norm": 1.6515777111053467, + "learning_rate": 5e-05, + "epoch": 0.6230389959659346, + "step": 9730 + }, + { + "loss": 2.2073, + "grad_norm": 1.6333870887756348, + "learning_rate": 5e-05, + "epoch": 0.6233591598898636, + "step": 9735 + }, + { + "loss": 2.2036, + "grad_norm": 1.6375046968460083, + "learning_rate": 5e-05, + "epoch": 0.6236793238137927, + "step": 9740 + }, + { + "loss": 2.1786, + "grad_norm": 1.6964610815048218, + "learning_rate": 5e-05, + "epoch": 0.6239994877377217, + "step": 9745 + }, + { + "loss": 2.1623, + "grad_norm": 1.571964144706726, + "learning_rate": 5e-05, + "epoch": 0.6243196516616507, + "step": 9750 + }, + { + "loss": 2.1862, + "grad_norm": 1.5701056718826294, + "learning_rate": 5e-05, + "epoch": 0.6246398155855798, + "step": 9755 + }, + { + "loss": 2.2058, + "grad_norm": 1.6287676095962524, + "learning_rate": 5e-05, + "epoch": 0.6249599795095089, + "step": 9760 + }, + { + "loss": 2.1764, + "grad_norm": 1.6932698488235474, + "learning_rate": 5e-05, + "epoch": 0.625280143433438, + "step": 9765 + }, + { + "loss": 2.1822, + "grad_norm": 1.6898211240768433, + "learning_rate": 5e-05, + "epoch": 0.6256003073573669, + "step": 9770 + }, + { + "loss": 2.196, + "grad_norm": 1.8175290822982788, + "learning_rate": 5e-05, + "epoch": 0.625920471281296, + "step": 9775 + }, + { + "loss": 2.1976, + "grad_norm": 1.6941791772842407, + "learning_rate": 5e-05, + "epoch": 0.6262406352052251, + "step": 9780 + }, + { + "loss": 2.202, + "grad_norm": 1.6704522371292114, + "learning_rate": 5e-05, + "epoch": 0.6265607991291541, + "step": 9785 + }, + { + "loss": 2.1804, + "grad_norm": 1.6961215734481812, + "learning_rate": 5e-05, + "epoch": 0.6268809630530832, + "step": 9790 + }, + { + "loss": 2.1622, + "grad_norm": 1.6616603136062622, + "learning_rate": 5e-05, + "epoch": 0.6272011269770122, + "step": 9795 + }, + { + "loss": 2.2176, + "grad_norm": 1.7092373371124268, + "learning_rate": 5e-05, + "epoch": 0.6275212909009413, + "step": 9800 + }, + { + "eval_loss": 2.066584587097168, + "eval_runtime": 9.576, + "eval_samples_per_second": 213.869, + "eval_steps_per_second": 26.734, + "epoch": 0.6275212909009413, + "step": 9800 + }, + { + "loss": 2.1975, + "grad_norm": 1.7031360864639282, + "learning_rate": 5e-05, + "epoch": 0.6278414548248703, + "step": 9805 + }, + { + "loss": 2.1963, + "grad_norm": 1.5917502641677856, + "learning_rate": 5e-05, + "epoch": 0.6281616187487994, + "step": 9810 + }, + { + "loss": 2.1941, + "grad_norm": 1.6682829856872559, + 
"learning_rate": 5e-05, + "epoch": 0.6284817826727285, + "step": 9815 + }, + { + "loss": 2.1917, + "grad_norm": 1.5641679763793945, + "learning_rate": 5e-05, + "epoch": 0.6288019465966574, + "step": 9820 + }, + { + "loss": 2.1773, + "grad_norm": 1.645300269126892, + "learning_rate": 5e-05, + "epoch": 0.6291221105205865, + "step": 9825 + }, + { + "loss": 2.1631, + "grad_norm": 1.5623157024383545, + "learning_rate": 5e-05, + "epoch": 0.6294422744445156, + "step": 9830 + }, + { + "loss": 2.1714, + "grad_norm": 1.5632286071777344, + "learning_rate": 5e-05, + "epoch": 0.6297624383684447, + "step": 9835 + }, + { + "loss": 2.2008, + "grad_norm": 1.5870970487594604, + "learning_rate": 5e-05, + "epoch": 0.6300826022923737, + "step": 9840 + }, + { + "loss": 2.177, + "grad_norm": 1.6274374723434448, + "learning_rate": 5e-05, + "epoch": 0.6304027662163028, + "step": 9845 + }, + { + "loss": 2.1598, + "grad_norm": 1.6094468832015991, + "learning_rate": 5e-05, + "epoch": 0.6307229301402318, + "step": 9850 + }, + { + "loss": 2.2126, + "grad_norm": 1.6894714832305908, + "learning_rate": 5e-05, + "epoch": 0.6310430940641608, + "step": 9855 + }, + { + "loss": 2.182, + "grad_norm": 1.6404249668121338, + "learning_rate": 5e-05, + "epoch": 0.6313632579880899, + "step": 9860 + }, + { + "loss": 2.1884, + "grad_norm": 1.5737414360046387, + "learning_rate": 5e-05, + "epoch": 0.631683421912019, + "step": 9865 + }, + { + "loss": 2.1913, + "grad_norm": 1.6798261404037476, + "learning_rate": 5e-05, + "epoch": 0.632003585835948, + "step": 9870 + }, + { + "loss": 2.1629, + "grad_norm": 1.6156235933303833, + "learning_rate": 5e-05, + "epoch": 0.632323749759877, + "step": 9875 + }, + { + "loss": 2.1785, + "grad_norm": 1.7109487056732178, + "learning_rate": 5e-05, + "epoch": 0.6326439136838061, + "step": 9880 + }, + { + "loss": 2.211, + "grad_norm": 1.6673591136932373, + "learning_rate": 5e-05, + "epoch": 0.6329640776077352, + "step": 9885 + }, + { + "loss": 2.2038, + "grad_norm": 1.6591060161590576, + "learning_rate": 5e-05, + "epoch": 0.6332842415316642, + "step": 9890 + }, + { + "loss": 2.1607, + "grad_norm": 1.6050101518630981, + "learning_rate": 5e-05, + "epoch": 0.6336044054555933, + "step": 9895 + }, + { + "loss": 2.1672, + "grad_norm": 1.6460251808166504, + "learning_rate": 5e-05, + "epoch": 0.6339245693795224, + "step": 9900 + }, + { + "loss": 2.1672, + "grad_norm": 1.7413328886032104, + "learning_rate": 5e-05, + "epoch": 0.6342447333034513, + "step": 9905 + }, + { + "loss": 2.2128, + "grad_norm": 1.684525966644287, + "learning_rate": 5e-05, + "epoch": 0.6345648972273804, + "step": 9910 + }, + { + "loss": 2.1987, + "grad_norm": 1.6904735565185547, + "learning_rate": 5e-05, + "epoch": 0.6348850611513095, + "step": 9915 + }, + { + "loss": 2.1979, + "grad_norm": 1.6782430410385132, + "learning_rate": 5e-05, + "epoch": 0.6352052250752386, + "step": 9920 + }, + { + "loss": 2.1839, + "grad_norm": 1.7190313339233398, + "learning_rate": 5e-05, + "epoch": 0.6355253889991676, + "step": 9925 + }, + { + "loss": 2.1885, + "grad_norm": 1.745063304901123, + "learning_rate": 5e-05, + "epoch": 0.6358455529230966, + "step": 9930 + }, + { + "loss": 2.174, + "grad_norm": 1.6339811086654663, + "learning_rate": 5e-05, + "epoch": 0.6361657168470257, + "step": 9935 + }, + { + "loss": 2.197, + "grad_norm": 1.6524280309677124, + "learning_rate": 5e-05, + "epoch": 0.6364858807709547, + "step": 9940 + }, + { + "loss": 2.1844, + "grad_norm": 1.7359994649887085, + "learning_rate": 5e-05, + "epoch": 0.6368060446948838, + "step": 9945 + }, + { + 
"loss": 2.1894, + "grad_norm": 1.6910420656204224, + "learning_rate": 5e-05, + "epoch": 0.6371262086188129, + "step": 9950 + }, + { + "loss": 2.1981, + "grad_norm": 1.6106345653533936, + "learning_rate": 5e-05, + "epoch": 0.6374463725427418, + "step": 9955 + }, + { + "loss": 2.2047, + "grad_norm": 1.6369112730026245, + "learning_rate": 5e-05, + "epoch": 0.6377665364666709, + "step": 9960 + }, + { + "loss": 2.2073, + "grad_norm": 1.6089766025543213, + "learning_rate": 5e-05, + "epoch": 0.6380867003906, + "step": 9965 + }, + { + "loss": 2.184, + "grad_norm": 1.7142517566680908, + "learning_rate": 5e-05, + "epoch": 0.6384068643145291, + "step": 9970 + }, + { + "loss": 2.1588, + "grad_norm": 1.6717356443405151, + "learning_rate": 5e-05, + "epoch": 0.6387270282384581, + "step": 9975 + }, + { + "loss": 2.1723, + "grad_norm": 1.7235606908798218, + "learning_rate": 5e-05, + "epoch": 0.6390471921623871, + "step": 9980 + }, + { + "loss": 2.201, + "grad_norm": 1.6770853996276855, + "learning_rate": 5e-05, + "epoch": 0.6393673560863162, + "step": 9985 + }, + { + "loss": 2.1764, + "grad_norm": 1.6714833974838257, + "learning_rate": 5e-05, + "epoch": 0.6396875200102452, + "step": 9990 + }, + { + "loss": 2.1779, + "grad_norm": 1.587377905845642, + "learning_rate": 5e-05, + "epoch": 0.6400076839341743, + "step": 9995 + }, + { + "loss": 2.1861, + "grad_norm": 1.6190916299819946, + "learning_rate": 5e-05, + "epoch": 0.6403278478581034, + "step": 10000 + }, + { + "eval_loss": 2.057950258255005, + "eval_runtime": 9.0347, + "eval_samples_per_second": 226.682, + "eval_steps_per_second": 28.335, + "epoch": 0.6403278478581034, + "step": 10000 + }, + { + "loss": 2.2194, + "grad_norm": 1.6693397760391235, + "learning_rate": 5e-05, + "epoch": 0.6406480117820325, + "step": 10005 + }, + { + "loss": 2.1777, + "grad_norm": 1.6318798065185547, + "learning_rate": 5e-05, + "epoch": 0.6409681757059614, + "step": 10010 + }, + { + "loss": 2.1711, + "grad_norm": 1.5991014242172241, + "learning_rate": 5e-05, + "epoch": 0.6412883396298905, + "step": 10015 + }, + { + "loss": 2.1958, + "grad_norm": 1.601650595664978, + "learning_rate": 5e-05, + "epoch": 0.6416085035538196, + "step": 10020 + }, + { + "loss": 2.1848, + "grad_norm": 1.6435421705245972, + "learning_rate": 5e-05, + "epoch": 0.6419286674777486, + "step": 10025 + }, + { + "loss": 2.1893, + "grad_norm": 1.6513253450393677, + "learning_rate": 5e-05, + "epoch": 0.6422488314016777, + "step": 10030 + }, + { + "loss": 2.192, + "grad_norm": 1.7683993577957153, + "learning_rate": 5e-05, + "epoch": 0.6425689953256067, + "step": 10035 + }, + { + "loss": 2.1817, + "grad_norm": 1.6842045783996582, + "learning_rate": 5e-05, + "epoch": 0.6428891592495357, + "step": 10040 + }, + { + "loss": 2.1972, + "grad_norm": 1.6233887672424316, + "learning_rate": 5e-05, + "epoch": 0.6432093231734648, + "step": 10045 + }, + { + "loss": 2.179, + "grad_norm": 1.659524917602539, + "learning_rate": 5e-05, + "epoch": 0.6435294870973939, + "step": 10050 + }, + { + "loss": 2.1826, + "grad_norm": 1.620766282081604, + "learning_rate": 5e-05, + "epoch": 0.643849651021323, + "step": 10055 + }, + { + "loss": 2.1796, + "grad_norm": 1.7828584909439087, + "learning_rate": 5e-05, + "epoch": 0.6441698149452519, + "step": 10060 + }, + { + "loss": 2.1733, + "grad_norm": 1.6462182998657227, + "learning_rate": 5e-05, + "epoch": 0.644489978869181, + "step": 10065 + }, + { + "loss": 2.1725, + "grad_norm": 1.6294734477996826, + "learning_rate": 5e-05, + "epoch": 0.6448101427931101, + "step": 10070 + }, + { + "loss": 
2.1705, + "grad_norm": 1.6464056968688965, + "learning_rate": 5e-05, + "epoch": 0.6451303067170391, + "step": 10075 + }, + { + "loss": 2.1945, + "grad_norm": 1.6422755718231201, + "learning_rate": 5e-05, + "epoch": 0.6454504706409682, + "step": 10080 + }, + { + "loss": 2.1755, + "grad_norm": 1.6333072185516357, + "learning_rate": 5e-05, + "epoch": 0.6457706345648973, + "step": 10085 + }, + { + "loss": 2.1642, + "grad_norm": 1.6198492050170898, + "learning_rate": 5e-05, + "epoch": 0.6460907984888263, + "step": 10090 + }, + { + "loss": 2.1547, + "grad_norm": 1.7265499830245972, + "learning_rate": 5e-05, + "epoch": 0.6464109624127553, + "step": 10095 + }, + { + "loss": 2.1862, + "grad_norm": 1.6740344762802124, + "learning_rate": 5e-05, + "epoch": 0.6467311263366844, + "step": 10100 + }, + { + "loss": 2.1726, + "grad_norm": 1.6273069381713867, + "learning_rate": 5e-05, + "epoch": 0.6470512902606135, + "step": 10105 + }, + { + "loss": 2.1927, + "grad_norm": 1.6094673871994019, + "learning_rate": 5e-05, + "epoch": 0.6473714541845424, + "step": 10110 + }, + { + "loss": 2.1617, + "grad_norm": 1.621466040611267, + "learning_rate": 5e-05, + "epoch": 0.6476916181084715, + "step": 10115 + }, + { + "loss": 2.1788, + "grad_norm": 1.7757426500320435, + "learning_rate": 5e-05, + "epoch": 0.6480117820324006, + "step": 10120 + }, + { + "loss": 2.2063, + "grad_norm": 1.6837197542190552, + "learning_rate": 5e-05, + "epoch": 0.6483319459563296, + "step": 10125 + }, + { + "loss": 2.2046, + "grad_norm": 1.810106635093689, + "learning_rate": 5e-05, + "epoch": 0.6486521098802587, + "step": 10130 + }, + { + "loss": 2.1667, + "grad_norm": 1.6960062980651855, + "learning_rate": 5e-05, + "epoch": 0.6489722738041878, + "step": 10135 + }, + { + "loss": 2.1799, + "grad_norm": 1.574356198310852, + "learning_rate": 5e-05, + "epoch": 0.6492924377281168, + "step": 10140 + }, + { + "loss": 2.1849, + "grad_norm": 1.5934480428695679, + "learning_rate": 5e-05, + "epoch": 0.6496126016520458, + "step": 10145 + }, + { + "loss": 2.1843, + "grad_norm": 1.5735338926315308, + "learning_rate": 5e-05, + "epoch": 0.6499327655759749, + "step": 10150 + }, + { + "loss": 2.1852, + "grad_norm": 1.611663818359375, + "learning_rate": 5e-05, + "epoch": 0.650252929499904, + "step": 10155 + }, + { + "loss": 2.2181, + "grad_norm": 1.699562430381775, + "learning_rate": 5e-05, + "epoch": 0.650573093423833, + "step": 10160 + }, + { + "loss": 2.1745, + "grad_norm": 1.612734079360962, + "learning_rate": 5e-05, + "epoch": 0.650893257347762, + "step": 10165 + }, + { + "loss": 2.1816, + "grad_norm": 1.620051622390747, + "learning_rate": 5e-05, + "epoch": 0.6512134212716911, + "step": 10170 + }, + { + "loss": 2.193, + "grad_norm": 1.772910475730896, + "learning_rate": 5e-05, + "epoch": 0.6515335851956202, + "step": 10175 + }, + { + "loss": 2.2045, + "grad_norm": 1.6278610229492188, + "learning_rate": 5e-05, + "epoch": 0.6518537491195492, + "step": 10180 + }, + { + "loss": 2.1702, + "grad_norm": 1.6800163984298706, + "learning_rate": 5e-05, + "epoch": 0.6521739130434783, + "step": 10185 + }, + { + "loss": 2.1728, + "grad_norm": 1.5924550294876099, + "learning_rate": 5e-05, + "epoch": 0.6524940769674074, + "step": 10190 + }, + { + "loss": 2.1876, + "grad_norm": 1.7043887376785278, + "learning_rate": 5e-05, + "epoch": 0.6528142408913363, + "step": 10195 + }, + { + "loss": 2.197, + "grad_norm": 1.7267884016036987, + "learning_rate": 5e-05, + "epoch": 0.6531344048152654, + "step": 10200 + }, + { + "eval_loss": 2.0496010780334473, + "eval_runtime": 9.1194, + 
"eval_samples_per_second": 224.576, + "eval_steps_per_second": 28.072, + "epoch": 0.6531344048152654, + "step": 10200 + }, + { + "loss": 2.1663, + "grad_norm": 1.721843957901001, + "learning_rate": 5e-05, + "epoch": 0.6534545687391945, + "step": 10205 + }, + { + "loss": 2.194, + "grad_norm": 1.666116714477539, + "learning_rate": 5e-05, + "epoch": 0.6537747326631235, + "step": 10210 + }, + { + "loss": 2.1648, + "grad_norm": 1.6751972436904907, + "learning_rate": 5e-05, + "epoch": 0.6540948965870526, + "step": 10215 + }, + { + "loss": 2.1695, + "grad_norm": 1.6365602016448975, + "learning_rate": 5e-05, + "epoch": 0.6544150605109816, + "step": 10220 + }, + { + "loss": 2.1997, + "grad_norm": 1.6664323806762695, + "learning_rate": 5e-05, + "epoch": 0.6547352244349107, + "step": 10225 + }, + { + "loss": 2.1405, + "grad_norm": 1.6744410991668701, + "learning_rate": 5e-05, + "epoch": 0.6550553883588397, + "step": 10230 + }, + { + "loss": 2.1786, + "grad_norm": 1.5833547115325928, + "learning_rate": 5e-05, + "epoch": 0.6553755522827688, + "step": 10235 + }, + { + "loss": 2.1724, + "grad_norm": 1.6934590339660645, + "learning_rate": 5e-05, + "epoch": 0.6556957162066979, + "step": 10240 + }, + { + "loss": 2.2072, + "grad_norm": 1.6612778902053833, + "learning_rate": 5e-05, + "epoch": 0.6560158801306268, + "step": 10245 + }, + { + "loss": 2.2002, + "grad_norm": 1.6952781677246094, + "learning_rate": 5e-05, + "epoch": 0.6563360440545559, + "step": 10250 + }, + { + "loss": 2.2005, + "grad_norm": 1.5911494493484497, + "learning_rate": 5e-05, + "epoch": 0.656656207978485, + "step": 10255 + }, + { + "loss": 2.1954, + "grad_norm": 1.6101614236831665, + "learning_rate": 5e-05, + "epoch": 0.6569763719024141, + "step": 10260 + }, + { + "loss": 2.2006, + "grad_norm": 1.5616638660430908, + "learning_rate": 5e-05, + "epoch": 0.6572965358263431, + "step": 10265 + }, + { + "loss": 2.1962, + "grad_norm": 1.6221803426742554, + "learning_rate": 5e-05, + "epoch": 0.6576166997502721, + "step": 10270 + }, + { + "loss": 2.1947, + "grad_norm": 1.5761173963546753, + "learning_rate": 5e-05, + "epoch": 0.6579368636742012, + "step": 10275 + }, + { + "loss": 2.171, + "grad_norm": 1.6054048538208008, + "learning_rate": 5e-05, + "epoch": 0.6582570275981302, + "step": 10280 + }, + { + "loss": 2.1872, + "grad_norm": 1.6371126174926758, + "learning_rate": 5e-05, + "epoch": 0.6585771915220593, + "step": 10285 + }, + { + "loss": 2.1471, + "grad_norm": 1.6947263479232788, + "learning_rate": 5e-05, + "epoch": 0.6588973554459884, + "step": 10290 + }, + { + "loss": 2.1586, + "grad_norm": 1.6005196571350098, + "learning_rate": 5e-05, + "epoch": 0.6592175193699173, + "step": 10295 + }, + { + "loss": 2.1868, + "grad_norm": 1.6588430404663086, + "learning_rate": 5e-05, + "epoch": 0.6595376832938464, + "step": 10300 + }, + { + "loss": 2.1795, + "grad_norm": 1.719780683517456, + "learning_rate": 5e-05, + "epoch": 0.6598578472177755, + "step": 10305 + }, + { + "loss": 2.2096, + "grad_norm": 1.7076449394226074, + "learning_rate": 5e-05, + "epoch": 0.6601780111417046, + "step": 10310 + }, + { + "loss": 2.1607, + "grad_norm": 1.708767294883728, + "learning_rate": 5e-05, + "epoch": 0.6604981750656336, + "step": 10315 + }, + { + "loss": 2.1654, + "grad_norm": 1.5694926977157593, + "learning_rate": 5e-05, + "epoch": 0.6608183389895627, + "step": 10320 + }, + { + "loss": 2.1733, + "grad_norm": 1.6309330463409424, + "learning_rate": 5e-05, + "epoch": 0.6611385029134917, + "step": 10325 + }, + { + "loss": 2.1714, + "grad_norm": 1.545966386795044, + 
"learning_rate": 5e-05, + "epoch": 0.6614586668374207, + "step": 10330 + }, + { + "loss": 2.1862, + "grad_norm": 1.6850484609603882, + "learning_rate": 5e-05, + "epoch": 0.6617788307613498, + "step": 10335 + }, + { + "loss": 2.2057, + "grad_norm": 1.7686723470687866, + "learning_rate": 5e-05, + "epoch": 0.6620989946852789, + "step": 10340 + }, + { + "loss": 2.2103, + "grad_norm": 1.6861554384231567, + "learning_rate": 5e-05, + "epoch": 0.662419158609208, + "step": 10345 + }, + { + "loss": 2.1704, + "grad_norm": 1.6404895782470703, + "learning_rate": 5e-05, + "epoch": 0.6627393225331369, + "step": 10350 + }, + { + "loss": 2.1692, + "grad_norm": 1.6215230226516724, + "learning_rate": 5e-05, + "epoch": 0.663059486457066, + "step": 10355 + }, + { + "loss": 2.1937, + "grad_norm": 1.6805126667022705, + "learning_rate": 5e-05, + "epoch": 0.6633796503809951, + "step": 10360 + }, + { + "loss": 2.1821, + "grad_norm": 1.6423799991607666, + "learning_rate": 5e-05, + "epoch": 0.6636998143049241, + "step": 10365 + }, + { + "loss": 2.1924, + "grad_norm": 1.6313560009002686, + "learning_rate": 5e-05, + "epoch": 0.6640199782288532, + "step": 10370 + }, + { + "loss": 2.1803, + "grad_norm": 1.6506428718566895, + "learning_rate": 5e-05, + "epoch": 0.6643401421527823, + "step": 10375 + }, + { + "loss": 2.1903, + "grad_norm": 1.6371464729309082, + "learning_rate": 5e-05, + "epoch": 0.6646603060767112, + "step": 10380 + }, + { + "loss": 2.1877, + "grad_norm": 1.6620516777038574, + "learning_rate": 5e-05, + "epoch": 0.6649804700006403, + "step": 10385 + }, + { + "loss": 2.1649, + "grad_norm": 1.6384953260421753, + "learning_rate": 5e-05, + "epoch": 0.6653006339245694, + "step": 10390 + }, + { + "loss": 2.2066, + "grad_norm": 1.5989880561828613, + "learning_rate": 5e-05, + "epoch": 0.6656207978484985, + "step": 10395 + }, + { + "loss": 2.1917, + "grad_norm": 1.6223093271255493, + "learning_rate": 5e-05, + "epoch": 0.6659409617724275, + "step": 10400 + }, + { + "eval_loss": 2.0479464530944824, + "eval_runtime": 9.5359, + "eval_samples_per_second": 214.766, + "eval_steps_per_second": 26.846, + "epoch": 0.6659409617724275, + "step": 10400 + }, + { + "loss": 2.1724, + "grad_norm": 1.6815154552459717, + "learning_rate": 5e-05, + "epoch": 0.6662611256963565, + "step": 10405 + }, + { + "loss": 2.1865, + "grad_norm": 1.6925560235977173, + "learning_rate": 5e-05, + "epoch": 0.6665812896202856, + "step": 10410 + }, + { + "loss": 2.1976, + "grad_norm": 1.6829434633255005, + "learning_rate": 5e-05, + "epoch": 0.6669014535442146, + "step": 10415 + }, + { + "loss": 2.1846, + "grad_norm": 1.6039493083953857, + "learning_rate": 5e-05, + "epoch": 0.6672216174681437, + "step": 10420 + }, + { + "loss": 2.1652, + "grad_norm": 1.5750089883804321, + "learning_rate": 5e-05, + "epoch": 0.6675417813920728, + "step": 10425 + }, + { + "loss": 2.1989, + "grad_norm": 1.6804012060165405, + "learning_rate": 5e-05, + "epoch": 0.6678619453160018, + "step": 10430 + }, + { + "loss": 2.1673, + "grad_norm": 1.6494613885879517, + "learning_rate": 5e-05, + "epoch": 0.6681821092399308, + "step": 10435 + }, + { + "loss": 2.1598, + "grad_norm": 1.5852676630020142, + "learning_rate": 5e-05, + "epoch": 0.6685022731638599, + "step": 10440 + }, + { + "loss": 2.188, + "grad_norm": 1.6259963512420654, + "learning_rate": 5e-05, + "epoch": 0.668822437087789, + "step": 10445 + }, + { + "loss": 2.211, + "grad_norm": 1.6978424787521362, + "learning_rate": 5e-05, + "epoch": 0.669142601011718, + "step": 10450 + }, + { + "loss": 2.1663, + "grad_norm": 
1.6471407413482666, + "learning_rate": 5e-05, + "epoch": 0.669462764935647, + "step": 10455 + }, + { + "loss": 2.1957, + "grad_norm": 1.5860986709594727, + "learning_rate": 5e-05, + "epoch": 0.6697829288595761, + "step": 10460 + }, + { + "loss": 2.1557, + "grad_norm": 1.65492582321167, + "learning_rate": 5e-05, + "epoch": 0.6701030927835051, + "step": 10465 + }, + { + "loss": 2.18, + "grad_norm": 1.6820275783538818, + "learning_rate": 5e-05, + "epoch": 0.6704232567074342, + "step": 10470 + }, + { + "loss": 2.1984, + "grad_norm": 1.6048341989517212, + "learning_rate": 5e-05, + "epoch": 0.6707434206313633, + "step": 10475 + }, + { + "loss": 2.1651, + "grad_norm": 1.6907062530517578, + "learning_rate": 5e-05, + "epoch": 0.6710635845552924, + "step": 10480 + }, + { + "loss": 2.1792, + "grad_norm": 1.6291903257369995, + "learning_rate": 5e-05, + "epoch": 0.6713837484792213, + "step": 10485 + }, + { + "loss": 2.1838, + "grad_norm": 1.5670826435089111, + "learning_rate": 5e-05, + "epoch": 0.6717039124031504, + "step": 10490 + }, + { + "loss": 2.1827, + "grad_norm": 1.6459773778915405, + "learning_rate": 5e-05, + "epoch": 0.6720240763270795, + "step": 10495 + }, + { + "loss": 2.1948, + "grad_norm": 1.6180630922317505, + "learning_rate": 5e-05, + "epoch": 0.6723442402510085, + "step": 10500 + }, + { + "loss": 2.1589, + "grad_norm": 1.5975276231765747, + "learning_rate": 5e-05, + "epoch": 0.6726644041749376, + "step": 10505 + }, + { + "loss": 2.1806, + "grad_norm": 1.6482629776000977, + "learning_rate": 5e-05, + "epoch": 0.6729845680988666, + "step": 10510 + }, + { + "loss": 2.2065, + "grad_norm": 1.535201907157898, + "learning_rate": 5e-05, + "epoch": 0.6733047320227957, + "step": 10515 + }, + { + "loss": 2.1731, + "grad_norm": 1.6452282667160034, + "learning_rate": 5e-05, + "epoch": 0.6736248959467247, + "step": 10520 + }, + { + "loss": 2.1929, + "grad_norm": 1.5697953701019287, + "learning_rate": 5e-05, + "epoch": 0.6739450598706538, + "step": 10525 + }, + { + "loss": 2.1677, + "grad_norm": 1.689011812210083, + "learning_rate": 5e-05, + "epoch": 0.6742652237945829, + "step": 10530 + }, + { + "loss": 2.1907, + "grad_norm": 1.5526947975158691, + "learning_rate": 5e-05, + "epoch": 0.6745853877185118, + "step": 10535 + }, + { + "loss": 2.1569, + "grad_norm": 1.6372793912887573, + "learning_rate": 5e-05, + "epoch": 0.6749055516424409, + "step": 10540 + }, + { + "loss": 2.185, + "grad_norm": 1.670624852180481, + "learning_rate": 5e-05, + "epoch": 0.67522571556637, + "step": 10545 + }, + { + "loss": 2.1731, + "grad_norm": 1.6175780296325684, + "learning_rate": 5e-05, + "epoch": 0.675545879490299, + "step": 10550 + }, + { + "loss": 2.1483, + "grad_norm": 1.5563071966171265, + "learning_rate": 5e-05, + "epoch": 0.6758660434142281, + "step": 10555 + }, + { + "loss": 2.1656, + "grad_norm": 1.6369822025299072, + "learning_rate": 5e-05, + "epoch": 0.6761862073381572, + "step": 10560 + }, + { + "loss": 2.1781, + "grad_norm": 1.5494641065597534, + "learning_rate": 5e-05, + "epoch": 0.6765063712620862, + "step": 10565 + }, + { + "loss": 2.175, + "grad_norm": 1.6584258079528809, + "learning_rate": 5e-05, + "epoch": 0.6768265351860152, + "step": 10570 + }, + { + "loss": 2.1616, + "grad_norm": 1.6767727136611938, + "learning_rate": 5e-05, + "epoch": 0.6771466991099443, + "step": 10575 + }, + { + "loss": 2.1636, + "grad_norm": 1.555019497871399, + "learning_rate": 5e-05, + "epoch": 0.6774668630338734, + "step": 10580 + }, + { + "loss": 2.1667, + "grad_norm": 1.597027063369751, + "learning_rate": 5e-05, + "epoch": 
0.6777870269578024, + "step": 10585 + }, + { + "loss": 2.1898, + "grad_norm": 1.5965244770050049, + "learning_rate": 5e-05, + "epoch": 0.6781071908817314, + "step": 10590 + }, + { + "loss": 2.1712, + "grad_norm": 1.53036630153656, + "learning_rate": 5e-05, + "epoch": 0.6784273548056605, + "step": 10595 + }, + { + "loss": 2.1438, + "grad_norm": 1.5849616527557373, + "learning_rate": 5e-05, + "epoch": 0.6787475187295896, + "step": 10600 + }, + { + "eval_loss": 2.038008213043213, + "eval_runtime": 9.1765, + "eval_samples_per_second": 223.178, + "eval_steps_per_second": 27.897, + "epoch": 0.6787475187295896, + "step": 10600 + }, + { + "loss": 2.1674, + "grad_norm": 1.6421003341674805, + "learning_rate": 5e-05, + "epoch": 0.6790676826535186, + "step": 10605 + }, + { + "loss": 2.1808, + "grad_norm": 1.602632761001587, + "learning_rate": 5e-05, + "epoch": 0.6793878465774477, + "step": 10610 + }, + { + "loss": 2.1703, + "grad_norm": 1.7214833498001099, + "learning_rate": 5e-05, + "epoch": 0.6797080105013767, + "step": 10615 + }, + { + "loss": 2.2029, + "grad_norm": 1.5542736053466797, + "learning_rate": 5e-05, + "epoch": 0.6800281744253057, + "step": 10620 + }, + { + "loss": 2.1652, + "grad_norm": 1.7546051740646362, + "learning_rate": 5e-05, + "epoch": 0.6803483383492348, + "step": 10625 + }, + { + "loss": 2.1935, + "grad_norm": 1.6220976114273071, + "learning_rate": 5e-05, + "epoch": 0.6806685022731639, + "step": 10630 + }, + { + "loss": 2.1631, + "grad_norm": 1.7736234664916992, + "learning_rate": 5e-05, + "epoch": 0.6809886661970929, + "step": 10635 + }, + { + "loss": 2.1659, + "grad_norm": 1.6324067115783691, + "learning_rate": 5e-05, + "epoch": 0.681308830121022, + "step": 10640 + }, + { + "loss": 2.1806, + "grad_norm": 1.6361428499221802, + "learning_rate": 5e-05, + "epoch": 0.681628994044951, + "step": 10645 + }, + { + "loss": 2.1511, + "grad_norm": 1.695727825164795, + "learning_rate": 5e-05, + "epoch": 0.6819491579688801, + "step": 10650 + }, + { + "loss": 2.1771, + "grad_norm": 1.5814673900604248, + "learning_rate": 5e-05, + "epoch": 0.6822693218928091, + "step": 10655 + }, + { + "loss": 2.156, + "grad_norm": 1.5904582738876343, + "learning_rate": 5e-05, + "epoch": 0.6825894858167382, + "step": 10660 + }, + { + "loss": 2.1845, + "grad_norm": 1.6580817699432373, + "learning_rate": 5e-05, + "epoch": 0.6829096497406673, + "step": 10665 + }, + { + "loss": 2.1571, + "grad_norm": 1.5745543241500854, + "learning_rate": 5e-05, + "epoch": 0.6832298136645962, + "step": 10670 + }, + { + "loss": 2.1504, + "grad_norm": 1.5689435005187988, + "learning_rate": 5e-05, + "epoch": 0.6835499775885253, + "step": 10675 + }, + { + "loss": 2.1806, + "grad_norm": 1.5868749618530273, + "learning_rate": 5e-05, + "epoch": 0.6838701415124544, + "step": 10680 + }, + { + "loss": 2.1685, + "grad_norm": 1.6363295316696167, + "learning_rate": 5e-05, + "epoch": 0.6841903054363835, + "step": 10685 + }, + { + "loss": 2.1508, + "grad_norm": 1.5658912658691406, + "learning_rate": 5e-05, + "epoch": 0.6845104693603125, + "step": 10690 + }, + { + "loss": 2.1798, + "grad_norm": 1.73909330368042, + "learning_rate": 5e-05, + "epoch": 0.6848306332842415, + "step": 10695 + }, + { + "loss": 2.1349, + "grad_norm": 1.5258785486221313, + "learning_rate": 5e-05, + "epoch": 0.6851507972081706, + "step": 10700 + }, + { + "loss": 2.1775, + "grad_norm": 1.6583811044692993, + "learning_rate": 5e-05, + "epoch": 0.6854709611320996, + "step": 10705 + }, + { + "loss": 2.1723, + "grad_norm": 1.623826503753662, + "learning_rate": 5e-05, + "epoch": 
0.6857911250560287, + "step": 10710 + }, + { + "loss": 2.162, + "grad_norm": 1.5844995975494385, + "learning_rate": 5e-05, + "epoch": 0.6861112889799578, + "step": 10715 + }, + { + "loss": 2.1719, + "grad_norm": 1.5476335287094116, + "learning_rate": 5e-05, + "epoch": 0.6864314529038867, + "step": 10720 + }, + { + "loss": 2.1687, + "grad_norm": 1.6284759044647217, + "learning_rate": 5e-05, + "epoch": 0.6867516168278158, + "step": 10725 + }, + { + "loss": 2.1917, + "grad_norm": 1.6278412342071533, + "learning_rate": 5e-05, + "epoch": 0.6870717807517449, + "step": 10730 + }, + { + "loss": 2.1692, + "grad_norm": 1.6887261867523193, + "learning_rate": 5e-05, + "epoch": 0.687391944675674, + "step": 10735 + }, + { + "loss": 2.1672, + "grad_norm": 1.6040128469467163, + "learning_rate": 5e-05, + "epoch": 0.687712108599603, + "step": 10740 + }, + { + "loss": 2.1755, + "grad_norm": 1.6104363203048706, + "learning_rate": 5e-05, + "epoch": 0.688032272523532, + "step": 10745 + }, + { + "loss": 2.1828, + "grad_norm": 1.5533658266067505, + "learning_rate": 5e-05, + "epoch": 0.6883524364474611, + "step": 10750 + }, + { + "loss": 2.1762, + "grad_norm": 1.6379051208496094, + "learning_rate": 5e-05, + "epoch": 0.6886726003713901, + "step": 10755 + }, + { + "loss": 2.1811, + "grad_norm": 1.6067684888839722, + "learning_rate": 5e-05, + "epoch": 0.6889927642953192, + "step": 10760 + }, + { + "loss": 2.1711, + "grad_norm": 1.6196955442428589, + "learning_rate": 5e-05, + "epoch": 0.6893129282192483, + "step": 10765 + }, + { + "loss": 2.1666, + "grad_norm": 1.7209603786468506, + "learning_rate": 5e-05, + "epoch": 0.6896330921431774, + "step": 10770 + }, + { + "loss": 2.1632, + "grad_norm": 1.6124151945114136, + "learning_rate": 5e-05, + "epoch": 0.6899532560671063, + "step": 10775 + }, + { + "loss": 2.1812, + "grad_norm": 1.6349486112594604, + "learning_rate": 5e-05, + "epoch": 0.6902734199910354, + "step": 10780 + }, + { + "loss": 2.1459, + "grad_norm": 1.5787373781204224, + "learning_rate": 5e-05, + "epoch": 0.6905935839149645, + "step": 10785 + }, + { + "loss": 2.1705, + "grad_norm": 1.658933401107788, + "learning_rate": 5e-05, + "epoch": 0.6909137478388935, + "step": 10790 + }, + { + "loss": 2.2042, + "grad_norm": 1.6257023811340332, + "learning_rate": 5e-05, + "epoch": 0.6912339117628226, + "step": 10795 + }, + { + "loss": 2.156, + "grad_norm": 1.6555736064910889, + "learning_rate": 5e-05, + "epoch": 0.6915540756867516, + "step": 10800 + }, + { + "eval_loss": 2.0406501293182373, + "eval_runtime": 9.4746, + "eval_samples_per_second": 216.157, + "eval_steps_per_second": 27.02, + "epoch": 0.6915540756867516, + "step": 10800 + }, + { + "loss": 2.1915, + "grad_norm": 1.5673022270202637, + "learning_rate": 5e-05, + "epoch": 0.6918742396106806, + "step": 10805 + }, + { + "loss": 2.1848, + "grad_norm": 1.6205099821090698, + "learning_rate": 5e-05, + "epoch": 0.6921944035346097, + "step": 10810 + }, + { + "loss": 2.1478, + "grad_norm": 1.605733871459961, + "learning_rate": 5e-05, + "epoch": 0.6925145674585388, + "step": 10815 + }, + { + "loss": 2.1869, + "grad_norm": 1.6565288305282593, + "learning_rate": 5e-05, + "epoch": 0.6928347313824679, + "step": 10820 + }, + { + "loss": 2.1886, + "grad_norm": 1.5877238512039185, + "learning_rate": 5e-05, + "epoch": 0.6931548953063968, + "step": 10825 + }, + { + "loss": 2.1702, + "grad_norm": 1.612343668937683, + "learning_rate": 5e-05, + "epoch": 0.6934750592303259, + "step": 10830 + }, + { + "loss": 2.1676, + "grad_norm": 1.5942809581756592, + "learning_rate": 5e-05, + 
"epoch": 0.693795223154255, + "step": 10835 + }, + { + "loss": 2.1859, + "grad_norm": 1.575735330581665, + "learning_rate": 5e-05, + "epoch": 0.694115387078184, + "step": 10840 + }, + { + "loss": 2.1989, + "grad_norm": 1.6750792264938354, + "learning_rate": 5e-05, + "epoch": 0.6944355510021131, + "step": 10845 + }, + { + "loss": 2.188, + "grad_norm": 1.6374013423919678, + "learning_rate": 5e-05, + "epoch": 0.6947557149260422, + "step": 10850 + }, + { + "loss": 2.1745, + "grad_norm": 1.6327176094055176, + "learning_rate": 5e-05, + "epoch": 0.6950758788499712, + "step": 10855 + }, + { + "loss": 2.1858, + "grad_norm": 1.6038893461227417, + "learning_rate": 5e-05, + "epoch": 0.6953960427739002, + "step": 10860 + }, + { + "loss": 2.1917, + "grad_norm": 1.6516578197479248, + "learning_rate": 5e-05, + "epoch": 0.6957162066978293, + "step": 10865 + }, + { + "loss": 2.1508, + "grad_norm": 1.6180627346038818, + "learning_rate": 5e-05, + "epoch": 0.6960363706217584, + "step": 10870 + }, + { + "loss": 2.155, + "grad_norm": 1.7027835845947266, + "learning_rate": 5e-05, + "epoch": 0.6963565345456874, + "step": 10875 + }, + { + "loss": 2.1702, + "grad_norm": 1.6208568811416626, + "learning_rate": 5e-05, + "epoch": 0.6966766984696164, + "step": 10880 + }, + { + "loss": 2.1676, + "grad_norm": 1.5398364067077637, + "learning_rate": 5e-05, + "epoch": 0.6969968623935455, + "step": 10885 + }, + { + "loss": 2.1617, + "grad_norm": 1.579201340675354, + "learning_rate": 5e-05, + "epoch": 0.6973170263174745, + "step": 10890 + }, + { + "loss": 2.2142, + "grad_norm": 1.629888653755188, + "learning_rate": 5e-05, + "epoch": 0.6976371902414036, + "step": 10895 + }, + { + "loss": 2.1654, + "grad_norm": 1.5622855424880981, + "learning_rate": 5e-05, + "epoch": 0.6979573541653327, + "step": 10900 + }, + { + "loss": 2.1655, + "grad_norm": 1.6659269332885742, + "learning_rate": 5e-05, + "epoch": 0.6982775180892618, + "step": 10905 + }, + { + "loss": 2.1779, + "grad_norm": 1.5977221727371216, + "learning_rate": 5e-05, + "epoch": 0.6985976820131907, + "step": 10910 + }, + { + "loss": 2.1613, + "grad_norm": 1.604508638381958, + "learning_rate": 5e-05, + "epoch": 0.6989178459371198, + "step": 10915 + }, + { + "loss": 2.1914, + "grad_norm": 1.6567248106002808, + "learning_rate": 5e-05, + "epoch": 0.6992380098610489, + "step": 10920 + }, + { + "loss": 2.1479, + "grad_norm": 1.681601881980896, + "learning_rate": 5e-05, + "epoch": 0.6995581737849779, + "step": 10925 + }, + { + "loss": 2.1561, + "grad_norm": 1.6984683275222778, + "learning_rate": 5e-05, + "epoch": 0.699878337708907, + "step": 10930 + }, + { + "loss": 2.1897, + "grad_norm": 1.6151689291000366, + "learning_rate": 5e-05, + "epoch": 0.700198501632836, + "step": 10935 + }, + { + "loss": 2.1693, + "grad_norm": 1.680700659751892, + "learning_rate": 5e-05, + "epoch": 0.7005186655567651, + "step": 10940 + }, + { + "loss": 2.2056, + "grad_norm": 1.6594295501708984, + "learning_rate": 5e-05, + "epoch": 0.7008388294806941, + "step": 10945 + }, + { + "loss": 2.1944, + "grad_norm": 1.569491982460022, + "learning_rate": 5e-05, + "epoch": 0.7011589934046232, + "step": 10950 + }, + { + "loss": 2.1825, + "grad_norm": 1.6589845418930054, + "learning_rate": 5e-05, + "epoch": 0.7014791573285523, + "step": 10955 + }, + { + "loss": 2.1556, + "grad_norm": 1.6408551931381226, + "learning_rate": 5e-05, + "epoch": 0.7017993212524812, + "step": 10960 + }, + { + "loss": 2.1657, + "grad_norm": 1.580729365348816, + "learning_rate": 5e-05, + "epoch": 0.7021194851764103, + "step": 10965 + }, + { + 
"loss": 2.203, + "grad_norm": 2.0288913249969482, + "learning_rate": 5e-05, + "epoch": 0.7024396491003394, + "step": 10970 + }, + { + "loss": 2.1871, + "grad_norm": 2.0533010959625244, + "learning_rate": 5e-05, + "epoch": 0.7027598130242684, + "step": 10975 + }, + { + "loss": 2.1798, + "grad_norm": 1.7345025539398193, + "learning_rate": 5e-05, + "epoch": 0.7030799769481975, + "step": 10980 + }, + { + "loss": 2.1758, + "grad_norm": 1.7670248746871948, + "learning_rate": 5e-05, + "epoch": 0.7034001408721265, + "step": 10985 + }, + { + "loss": 2.1745, + "grad_norm": 1.7378064393997192, + "learning_rate": 5e-05, + "epoch": 0.7037203047960556, + "step": 10990 + }, + { + "loss": 2.1861, + "grad_norm": 1.7046862840652466, + "learning_rate": 5e-05, + "epoch": 0.7040404687199846, + "step": 10995 + }, + { + "loss": 2.1677, + "grad_norm": 1.7800571918487549, + "learning_rate": 5e-05, + "epoch": 0.7043606326439137, + "step": 11000 + }, + { + "eval_loss": 2.030477523803711, + "eval_runtime": 11.977, + "eval_samples_per_second": 170.995, + "eval_steps_per_second": 21.374, + "epoch": 0.7043606326439137, + "step": 11000 + }, + { + "loss": 2.1441, + "grad_norm": 1.6233124732971191, + "learning_rate": 5e-05, + "epoch": 0.7046807965678428, + "step": 11005 + }, + { + "loss": 2.1677, + "grad_norm": 1.6337502002716064, + "learning_rate": 5e-05, + "epoch": 0.7050009604917717, + "step": 11010 + }, + { + "loss": 2.1558, + "grad_norm": 1.602023720741272, + "learning_rate": 5e-05, + "epoch": 0.7053211244157008, + "step": 11015 + }, + { + "loss": 2.2029, + "grad_norm": 1.642838478088379, + "learning_rate": 5e-05, + "epoch": 0.7056412883396299, + "step": 11020 + }, + { + "loss": 2.1435, + "grad_norm": 1.6262296438217163, + "learning_rate": 5e-05, + "epoch": 0.705961452263559, + "step": 11025 + }, + { + "loss": 2.1539, + "grad_norm": 1.6026281118392944, + "learning_rate": 5e-05, + "epoch": 0.706281616187488, + "step": 11030 + }, + { + "loss": 2.1486, + "grad_norm": 1.722970962524414, + "learning_rate": 5e-05, + "epoch": 0.706601780111417, + "step": 11035 + }, + { + "loss": 2.1827, + "grad_norm": 1.606521487236023, + "learning_rate": 5e-05, + "epoch": 0.7069219440353461, + "step": 11040 + }, + { + "loss": 2.1569, + "grad_norm": 1.6021209955215454, + "learning_rate": 5e-05, + "epoch": 0.7072421079592751, + "step": 11045 + }, + { + "loss": 2.1489, + "grad_norm": 1.5476322174072266, + "learning_rate": 5e-05, + "epoch": 0.7075622718832042, + "step": 11050 + }, + { + "loss": 2.1817, + "grad_norm": 1.5742813348770142, + "learning_rate": 5e-05, + "epoch": 0.7078824358071333, + "step": 11055 + }, + { + "loss": 2.1811, + "grad_norm": 1.5461931228637695, + "learning_rate": 5e-05, + "epoch": 0.7082025997310623, + "step": 11060 + }, + { + "loss": 2.1569, + "grad_norm": 1.5782997608184814, + "learning_rate": 5e-05, + "epoch": 0.7085227636549913, + "step": 11065 + }, + { + "loss": 2.1863, + "grad_norm": 1.652904748916626, + "learning_rate": 5e-05, + "epoch": 0.7088429275789204, + "step": 11070 + }, + { + "loss": 2.1902, + "grad_norm": 1.6661841869354248, + "learning_rate": 5e-05, + "epoch": 0.7091630915028495, + "step": 11075 + }, + { + "loss": 2.1641, + "grad_norm": 1.5823413133621216, + "learning_rate": 5e-05, + "epoch": 0.7094832554267785, + "step": 11080 + }, + { + "loss": 2.1793, + "grad_norm": 1.6849126815795898, + "learning_rate": 5e-05, + "epoch": 0.7098034193507076, + "step": 11085 + }, + { + "loss": 2.1686, + "grad_norm": 1.4970881938934326, + "learning_rate": 5e-05, + "epoch": 0.7101235832746366, + "step": 11090 + }, + { + 
"loss": 2.1678, + "grad_norm": 1.5892386436462402, + "learning_rate": 5e-05, + "epoch": 0.7104437471985656, + "step": 11095 + }, + { + "loss": 2.1718, + "grad_norm": 1.6064002513885498, + "learning_rate": 5e-05, + "epoch": 0.7107639111224947, + "step": 11100 + }, + { + "loss": 2.1564, + "grad_norm": 1.6332510709762573, + "learning_rate": 5e-05, + "epoch": 0.7110840750464238, + "step": 11105 + }, + { + "loss": 2.1548, + "grad_norm": 1.5667170286178589, + "learning_rate": 5e-05, + "epoch": 0.7114042389703529, + "step": 11110 + }, + { + "loss": 2.1598, + "grad_norm": 1.6204004287719727, + "learning_rate": 5e-05, + "epoch": 0.7117244028942818, + "step": 11115 + }, + { + "loss": 2.1704, + "grad_norm": 1.6428627967834473, + "learning_rate": 5e-05, + "epoch": 0.7120445668182109, + "step": 11120 + }, + { + "loss": 2.1742, + "grad_norm": 1.6737961769104004, + "learning_rate": 5e-05, + "epoch": 0.71236473074214, + "step": 11125 + }, + { + "loss": 2.1952, + "grad_norm": 1.6475317478179932, + "learning_rate": 5e-05, + "epoch": 0.712684894666069, + "step": 11130 + }, + { + "loss": 2.1933, + "grad_norm": 1.6665571928024292, + "learning_rate": 5e-05, + "epoch": 0.7130050585899981, + "step": 11135 + }, + { + "loss": 2.1678, + "grad_norm": 1.5727708339691162, + "learning_rate": 5e-05, + "epoch": 0.7133252225139272, + "step": 11140 + }, + { + "loss": 2.1553, + "grad_norm": 1.5942537784576416, + "learning_rate": 5e-05, + "epoch": 0.7136453864378561, + "step": 11145 + }, + { + "loss": 2.1724, + "grad_norm": 1.6233826875686646, + "learning_rate": 5e-05, + "epoch": 0.7139655503617852, + "step": 11150 + }, + { + "loss": 2.17, + "grad_norm": 1.6468729972839355, + "learning_rate": 5e-05, + "epoch": 0.7142857142857143, + "step": 11155 + }, + { + "loss": 2.1759, + "grad_norm": 1.6635937690734863, + "learning_rate": 5e-05, + "epoch": 0.7146058782096434, + "step": 11160 + }, + { + "loss": 2.1676, + "grad_norm": 1.67452073097229, + "learning_rate": 5e-05, + "epoch": 0.7149260421335724, + "step": 11165 + }, + { + "loss": 2.1679, + "grad_norm": 1.6762311458587646, + "learning_rate": 5e-05, + "epoch": 0.7152462060575014, + "step": 11170 + }, + { + "loss": 2.2012, + "grad_norm": 1.6281007528305054, + "learning_rate": 5e-05, + "epoch": 0.7155663699814305, + "step": 11175 + }, + { + "loss": 2.1857, + "grad_norm": 1.6250513792037964, + "learning_rate": 5e-05, + "epoch": 0.7158865339053595, + "step": 11180 + }, + { + "loss": 2.1537, + "grad_norm": 1.57022225856781, + "learning_rate": 5e-05, + "epoch": 0.7162066978292886, + "step": 11185 + }, + { + "loss": 2.1607, + "grad_norm": 1.6798429489135742, + "learning_rate": 5e-05, + "epoch": 0.7165268617532177, + "step": 11190 + }, + { + "loss": 2.1764, + "grad_norm": 1.6130719184875488, + "learning_rate": 5e-05, + "epoch": 0.7168470256771468, + "step": 11195 + }, + { + "loss": 2.1805, + "grad_norm": 1.6312038898468018, + "learning_rate": 5e-05, + "epoch": 0.7171671896010757, + "step": 11200 + }, + { + "eval_loss": 2.0418291091918945, + "eval_runtime": 9.4156, + "eval_samples_per_second": 217.512, + "eval_steps_per_second": 27.189, + "epoch": 0.7171671896010757, + "step": 11200 + }, + { + "loss": 2.1583, + "grad_norm": 1.6148508787155151, + "learning_rate": 5e-05, + "epoch": 0.7174873535250048, + "step": 11205 + }, + { + "loss": 2.156, + "grad_norm": 1.6483427286148071, + "learning_rate": 5e-05, + "epoch": 0.7178075174489339, + "step": 11210 + }, + { + "loss": 2.1894, + "grad_norm": 1.7447383403778076, + "learning_rate": 5e-05, + "epoch": 0.7181276813728629, + "step": 11215 + }, + { + 
"loss": 2.1905, + "grad_norm": 1.605284571647644, + "learning_rate": 5e-05, + "epoch": 0.718447845296792, + "step": 11220 + }, + { + "loss": 2.1626, + "grad_norm": 1.5911649465560913, + "learning_rate": 5e-05, + "epoch": 0.718768009220721, + "step": 11225 + }, + { + "loss": 2.1717, + "grad_norm": 1.535005807876587, + "learning_rate": 5e-05, + "epoch": 0.71908817314465, + "step": 11230 + }, + { + "loss": 2.1844, + "grad_norm": 1.5297882556915283, + "learning_rate": 5e-05, + "epoch": 0.7194083370685791, + "step": 11235 + }, + { + "loss": 2.1602, + "grad_norm": 1.6361292600631714, + "learning_rate": 5e-05, + "epoch": 0.7197285009925082, + "step": 11240 + }, + { + "loss": 2.1776, + "grad_norm": 1.6264761686325073, + "learning_rate": 5e-05, + "epoch": 0.7200486649164373, + "step": 11245 + }, + { + "loss": 2.1692, + "grad_norm": 1.6878418922424316, + "learning_rate": 5e-05, + "epoch": 0.7203688288403662, + "step": 11250 + }, + { + "loss": 2.1613, + "grad_norm": 1.6586750745773315, + "learning_rate": 5e-05, + "epoch": 0.7206889927642953, + "step": 11255 + }, + { + "loss": 2.1647, + "grad_norm": 1.6286550760269165, + "learning_rate": 5e-05, + "epoch": 0.7210091566882244, + "step": 11260 + }, + { + "loss": 2.1485, + "grad_norm": 1.6325013637542725, + "learning_rate": 5e-05, + "epoch": 0.7213293206121534, + "step": 11265 + }, + { + "loss": 2.1846, + "grad_norm": 1.5908008813858032, + "learning_rate": 5e-05, + "epoch": 0.7216494845360825, + "step": 11270 + }, + { + "loss": 2.1725, + "grad_norm": 1.5738271474838257, + "learning_rate": 5e-05, + "epoch": 0.7219696484600115, + "step": 11275 + }, + { + "loss": 2.1878, + "grad_norm": 1.6095255613327026, + "learning_rate": 5e-05, + "epoch": 0.7222898123839406, + "step": 11280 + }, + { + "loss": 2.1599, + "grad_norm": 1.5834318399429321, + "learning_rate": 5e-05, + "epoch": 0.7226099763078696, + "step": 11285 + }, + { + "loss": 2.2047, + "grad_norm": 1.5938414335250854, + "learning_rate": 5e-05, + "epoch": 0.7229301402317987, + "step": 11290 + }, + { + "loss": 2.1833, + "grad_norm": 1.62465238571167, + "learning_rate": 5e-05, + "epoch": 0.7232503041557278, + "step": 11295 + }, + { + "loss": 2.1639, + "grad_norm": 1.6351125240325928, + "learning_rate": 5e-05, + "epoch": 0.7235704680796567, + "step": 11300 + }, + { + "loss": 2.1567, + "grad_norm": 1.6405686140060425, + "learning_rate": 5e-05, + "epoch": 0.7238906320035858, + "step": 11305 + }, + { + "loss": 2.1425, + "grad_norm": 1.6778993606567383, + "learning_rate": 5e-05, + "epoch": 0.7242107959275149, + "step": 11310 + }, + { + "loss": 2.1388, + "grad_norm": 1.5974764823913574, + "learning_rate": 5e-05, + "epoch": 0.7245309598514439, + "step": 11315 + }, + { + "loss": 2.1866, + "grad_norm": 1.6215441226959229, + "learning_rate": 5e-05, + "epoch": 0.724851123775373, + "step": 11320 + }, + { + "loss": 2.1612, + "grad_norm": 1.638525366783142, + "learning_rate": 5e-05, + "epoch": 0.7251712876993021, + "step": 11325 + }, + { + "loss": 2.1588, + "grad_norm": 1.677040934562683, + "learning_rate": 5e-05, + "epoch": 0.7254914516232311, + "step": 11330 + }, + { + "loss": 2.1894, + "grad_norm": 1.6689181327819824, + "learning_rate": 5e-05, + "epoch": 0.7258116155471601, + "step": 11335 + }, + { + "loss": 2.1748, + "grad_norm": 1.6328116655349731, + "learning_rate": 5e-05, + "epoch": 0.7261317794710892, + "step": 11340 + }, + { + "loss": 2.154, + "grad_norm": 1.6823488473892212, + "learning_rate": 5e-05, + "epoch": 0.7264519433950183, + "step": 11345 + }, + { + "loss": 2.1432, + "grad_norm": 1.6382944583892822, + 
"learning_rate": 5e-05, + "epoch": 0.7267721073189473, + "step": 11350 + }, + { + "loss": 2.1632, + "grad_norm": 1.6119322776794434, + "learning_rate": 5e-05, + "epoch": 0.7270922712428763, + "step": 11355 + }, + { + "loss": 2.1636, + "grad_norm": 1.6790704727172852, + "learning_rate": 5e-05, + "epoch": 0.7274124351668054, + "step": 11360 + }, + { + "loss": 2.1905, + "grad_norm": 1.6697032451629639, + "learning_rate": 5e-05, + "epoch": 0.7277325990907345, + "step": 11365 + }, + { + "loss": 2.1917, + "grad_norm": 1.6095983982086182, + "learning_rate": 5e-05, + "epoch": 0.7280527630146635, + "step": 11370 + }, + { + "loss": 2.1417, + "grad_norm": 1.5918678045272827, + "learning_rate": 5e-05, + "epoch": 0.7283729269385926, + "step": 11375 + }, + { + "loss": 2.1782, + "grad_norm": 1.5360814332962036, + "learning_rate": 5e-05, + "epoch": 0.7286930908625217, + "step": 11380 + }, + { + "loss": 2.1869, + "grad_norm": 1.613787293434143, + "learning_rate": 5e-05, + "epoch": 0.7290132547864506, + "step": 11385 + }, + { + "loss": 2.1888, + "grad_norm": 1.5174287557601929, + "learning_rate": 5e-05, + "epoch": 0.7293334187103797, + "step": 11390 + }, + { + "loss": 2.1803, + "grad_norm": 1.625603437423706, + "learning_rate": 5e-05, + "epoch": 0.7296535826343088, + "step": 11395 + }, + { + "loss": 2.1788, + "grad_norm": 1.572482943534851, + "learning_rate": 5e-05, + "epoch": 0.7299737465582378, + "step": 11400 + }, + { + "eval_loss": 2.03629732131958, + "eval_runtime": 9.2282, + "eval_samples_per_second": 221.929, + "eval_steps_per_second": 27.741, + "epoch": 0.7299737465582378, + "step": 11400 + }, + { + "loss": 2.1694, + "grad_norm": 1.6652473211288452, + "learning_rate": 5e-05, + "epoch": 0.7302939104821669, + "step": 11405 + }, + { + "loss": 2.1589, + "grad_norm": 1.5308523178100586, + "learning_rate": 5e-05, + "epoch": 0.7306140744060959, + "step": 11410 + }, + { + "loss": 2.1666, + "grad_norm": 1.578356385231018, + "learning_rate": 5e-05, + "epoch": 0.730934238330025, + "step": 11415 + }, + { + "loss": 2.1538, + "grad_norm": 1.6113513708114624, + "learning_rate": 5e-05, + "epoch": 0.731254402253954, + "step": 11420 + }, + { + "loss": 2.184, + "grad_norm": 1.6025482416152954, + "learning_rate": 5e-05, + "epoch": 0.7315745661778831, + "step": 11425 + }, + { + "loss": 2.1466, + "grad_norm": 1.674232006072998, + "learning_rate": 5e-05, + "epoch": 0.7318947301018122, + "step": 11430 + }, + { + "loss": 2.1636, + "grad_norm": 1.6667330265045166, + "learning_rate": 5e-05, + "epoch": 0.7322148940257411, + "step": 11435 + }, + { + "loss": 2.1873, + "grad_norm": 1.5927730798721313, + "learning_rate": 5e-05, + "epoch": 0.7325350579496702, + "step": 11440 + }, + { + "loss": 2.1408, + "grad_norm": 1.6314619779586792, + "learning_rate": 5e-05, + "epoch": 0.7328552218735993, + "step": 11445 + }, + { + "loss": 2.1728, + "grad_norm": 1.6256287097930908, + "learning_rate": 5e-05, + "epoch": 0.7331753857975284, + "step": 11450 + }, + { + "loss": 2.1653, + "grad_norm": 1.556431531906128, + "learning_rate": 5e-05, + "epoch": 0.7334955497214574, + "step": 11455 + }, + { + "loss": 2.1638, + "grad_norm": 1.600176453590393, + "learning_rate": 5e-05, + "epoch": 0.7338157136453864, + "step": 11460 + }, + { + "loss": 2.152, + "grad_norm": 1.5737305879592896, + "learning_rate": 5e-05, + "epoch": 0.7341358775693155, + "step": 11465 + }, + { + "loss": 2.1757, + "grad_norm": 1.5820810794830322, + "learning_rate": 5e-05, + "epoch": 0.7344560414932445, + "step": 11470 + }, + { + "loss": 2.1806, + "grad_norm": 1.7114322185516357, + 
"learning_rate": 5e-05, + "epoch": 0.7347762054171736, + "step": 11475 + }, + { + "loss": 2.1672, + "grad_norm": 1.5795398950576782, + "learning_rate": 5e-05, + "epoch": 0.7350963693411027, + "step": 11480 + }, + { + "loss": 2.178, + "grad_norm": 1.6970211267471313, + "learning_rate": 5e-05, + "epoch": 0.7354165332650316, + "step": 11485 + }, + { + "loss": 2.1437, + "grad_norm": 1.6389212608337402, + "learning_rate": 5e-05, + "epoch": 0.7357366971889607, + "step": 11490 + }, + { + "loss": 2.1463, + "grad_norm": 1.5622590780258179, + "learning_rate": 5e-05, + "epoch": 0.7360568611128898, + "step": 11495 + }, + { + "loss": 2.1763, + "grad_norm": 1.6667590141296387, + "learning_rate": 5e-05, + "epoch": 0.7363770250368189, + "step": 11500 + }, + { + "loss": 2.1737, + "grad_norm": 1.6279217004776, + "learning_rate": 5e-05, + "epoch": 0.7366971889607479, + "step": 11505 + }, + { + "loss": 2.1736, + "grad_norm": 1.5891218185424805, + "learning_rate": 5e-05, + "epoch": 0.737017352884677, + "step": 11510 + }, + { + "loss": 2.1645, + "grad_norm": 1.5445228815078735, + "learning_rate": 5e-05, + "epoch": 0.737337516808606, + "step": 11515 + }, + { + "loss": 2.1715, + "grad_norm": 1.6425042152404785, + "learning_rate": 5e-05, + "epoch": 0.737657680732535, + "step": 11520 + }, + { + "loss": 2.1548, + "grad_norm": 1.6075410842895508, + "learning_rate": 5e-05, + "epoch": 0.7379778446564641, + "step": 11525 + }, + { + "loss": 2.1311, + "grad_norm": 1.6689494848251343, + "learning_rate": 5e-05, + "epoch": 0.7382980085803932, + "step": 11530 + }, + { + "loss": 2.1788, + "grad_norm": 1.6945607662200928, + "learning_rate": 5e-05, + "epoch": 0.7386181725043223, + "step": 11535 + }, + { + "loss": 2.1646, + "grad_norm": 1.7109571695327759, + "learning_rate": 5e-05, + "epoch": 0.7389383364282512, + "step": 11540 + }, + { + "loss": 2.1656, + "grad_norm": 1.6874438524246216, + "learning_rate": 5e-05, + "epoch": 0.7392585003521803, + "step": 11545 + }, + { + "loss": 2.1719, + "grad_norm": 1.6662932634353638, + "learning_rate": 5e-05, + "epoch": 0.7395786642761094, + "step": 11550 + }, + { + "loss": 2.1489, + "grad_norm": 1.5764780044555664, + "learning_rate": 5e-05, + "epoch": 0.7398988282000384, + "step": 11555 + }, + { + "loss": 2.1715, + "grad_norm": 1.6159734725952148, + "learning_rate": 5e-05, + "epoch": 0.7402189921239675, + "step": 11560 + }, + { + "loss": 2.1861, + "grad_norm": 1.5857573747634888, + "learning_rate": 5e-05, + "epoch": 0.7405391560478966, + "step": 11565 + }, + { + "loss": 2.1622, + "grad_norm": 1.5534696578979492, + "learning_rate": 5e-05, + "epoch": 0.7408593199718255, + "step": 11570 + }, + { + "loss": 2.144, + "grad_norm": 1.5971225500106812, + "learning_rate": 5e-05, + "epoch": 0.7411794838957546, + "step": 11575 + }, + { + "loss": 2.1669, + "grad_norm": 1.6196457147598267, + "learning_rate": 5e-05, + "epoch": 0.7414996478196837, + "step": 11580 + }, + { + "loss": 2.1851, + "grad_norm": 1.584476351737976, + "learning_rate": 5e-05, + "epoch": 0.7418198117436128, + "step": 11585 + }, + { + "loss": 2.1444, + "grad_norm": 1.6179357767105103, + "learning_rate": 5e-05, + "epoch": 0.7421399756675418, + "step": 11590 + }, + { + "loss": 2.1497, + "grad_norm": 1.549419641494751, + "learning_rate": 5e-05, + "epoch": 0.7424601395914708, + "step": 11595 + }, + { + "loss": 2.1902, + "grad_norm": 1.6180243492126465, + "learning_rate": 5e-05, + "epoch": 0.7427803035153999, + "step": 11600 + }, + { + "eval_loss": 2.032823085784912, + "eval_runtime": 12.5035, + "eval_samples_per_second": 163.795, + 
"eval_steps_per_second": 20.474, + "epoch": 0.7427803035153999, + "step": 11600 + }, + { + "loss": 2.1372, + "grad_norm": 1.7094162702560425, + "learning_rate": 5e-05, + "epoch": 0.7431004674393289, + "step": 11605 + }, + { + "loss": 2.169, + "grad_norm": 1.5947761535644531, + "learning_rate": 5e-05, + "epoch": 0.743420631363258, + "step": 11610 + }, + { + "loss": 2.1527, + "grad_norm": 1.634129524230957, + "learning_rate": 5e-05, + "epoch": 0.7437407952871871, + "step": 11615 + }, + { + "loss": 2.1599, + "grad_norm": 1.6361533403396606, + "learning_rate": 5e-05, + "epoch": 0.7440609592111161, + "step": 11620 + }, + { + "loss": 2.1682, + "grad_norm": 1.6670771837234497, + "learning_rate": 5e-05, + "epoch": 0.7443811231350451, + "step": 11625 + }, + { + "loss": 2.1661, + "grad_norm": 1.6897422075271606, + "learning_rate": 5e-05, + "epoch": 0.7447012870589742, + "step": 11630 + }, + { + "loss": 2.1425, + "grad_norm": 1.5701279640197754, + "learning_rate": 5e-05, + "epoch": 0.7450214509829033, + "step": 11635 + }, + { + "loss": 2.1747, + "grad_norm": 1.6329246759414673, + "learning_rate": 5e-05, + "epoch": 0.7453416149068323, + "step": 11640 + }, + { + "loss": 2.186, + "grad_norm": 1.651111364364624, + "learning_rate": 5e-05, + "epoch": 0.7456617788307613, + "step": 11645 + }, + { + "loss": 2.1843, + "grad_norm": 1.5741336345672607, + "learning_rate": 5e-05, + "epoch": 0.7459819427546904, + "step": 11650 + }, + { + "loss": 2.1641, + "grad_norm": 1.6159652471542358, + "learning_rate": 5e-05, + "epoch": 0.7463021066786194, + "step": 11655 + }, + { + "loss": 2.1791, + "grad_norm": 1.538552165031433, + "learning_rate": 5e-05, + "epoch": 0.7466222706025485, + "step": 11660 + }, + { + "loss": 2.1707, + "grad_norm": 1.616956353187561, + "learning_rate": 5e-05, + "epoch": 0.7469424345264776, + "step": 11665 + }, + { + "loss": 2.1778, + "grad_norm": 1.530639410018921, + "learning_rate": 5e-05, + "epoch": 0.7472625984504067, + "step": 11670 + }, + { + "loss": 2.1523, + "grad_norm": 1.5348337888717651, + "learning_rate": 5e-05, + "epoch": 0.7475827623743356, + "step": 11675 + }, + { + "loss": 2.1787, + "grad_norm": 1.6515288352966309, + "learning_rate": 5e-05, + "epoch": 0.7479029262982647, + "step": 11680 + }, + { + "loss": 2.167, + "grad_norm": 1.654701828956604, + "learning_rate": 5e-05, + "epoch": 0.7482230902221938, + "step": 11685 + }, + { + "loss": 2.149, + "grad_norm": 1.5613808631896973, + "learning_rate": 5e-05, + "epoch": 0.7485432541461228, + "step": 11690 + }, + { + "loss": 2.1814, + "grad_norm": 1.6129508018493652, + "learning_rate": 5e-05, + "epoch": 0.7488634180700519, + "step": 11695 + }, + { + "loss": 2.1738, + "grad_norm": 1.5563007593154907, + "learning_rate": 5e-05, + "epoch": 0.7491835819939809, + "step": 11700 + }, + { + "loss": 2.1543, + "grad_norm": 1.5803391933441162, + "learning_rate": 5e-05, + "epoch": 0.74950374591791, + "step": 11705 + }, + { + "loss": 2.19, + "grad_norm": 1.61357581615448, + "learning_rate": 5e-05, + "epoch": 0.749823909841839, + "step": 11710 + }, + { + "loss": 2.1295, + "grad_norm": 1.5979573726654053, + "learning_rate": 5e-05, + "epoch": 0.7501440737657681, + "step": 11715 + }, + { + "loss": 2.1445, + "grad_norm": 1.6031203269958496, + "learning_rate": 5e-05, + "epoch": 0.7504642376896972, + "step": 11720 + }, + { + "loss": 2.1808, + "grad_norm": 1.645012378692627, + "learning_rate": 5e-05, + "epoch": 0.7507844016136261, + "step": 11725 + }, + { + "loss": 2.161, + "grad_norm": 1.629136562347412, + "learning_rate": 5e-05, + "epoch": 0.7511045655375552, + 
"step": 11730 + }, + { + "loss": 2.1554, + "grad_norm": 1.5675849914550781, + "learning_rate": 5e-05, + "epoch": 0.7514247294614843, + "step": 11735 + }, + { + "loss": 2.1769, + "grad_norm": 1.6842530965805054, + "learning_rate": 5e-05, + "epoch": 0.7517448933854133, + "step": 11740 + }, + { + "loss": 2.1571, + "grad_norm": 1.645548939704895, + "learning_rate": 5e-05, + "epoch": 0.7520650573093424, + "step": 11745 + }, + { + "loss": 2.1565, + "grad_norm": 1.6353763341903687, + "learning_rate": 5e-05, + "epoch": 0.7523852212332715, + "step": 11750 + }, + { + "loss": 2.1574, + "grad_norm": 1.6045830249786377, + "learning_rate": 5e-05, + "epoch": 0.7527053851572005, + "step": 11755 + }, + { + "loss": 2.1557, + "grad_norm": 1.6563011407852173, + "learning_rate": 5e-05, + "epoch": 0.7530255490811295, + "step": 11760 + }, + { + "loss": 2.1776, + "grad_norm": 1.662995457649231, + "learning_rate": 5e-05, + "epoch": 0.7533457130050586, + "step": 11765 + }, + { + "loss": 2.1611, + "grad_norm": 1.6755454540252686, + "learning_rate": 5e-05, + "epoch": 0.7536658769289877, + "step": 11770 + }, + { + "loss": 2.164, + "grad_norm": 1.6755766868591309, + "learning_rate": 5e-05, + "epoch": 0.7539860408529167, + "step": 11775 + }, + { + "loss": 2.1476, + "grad_norm": 1.637885570526123, + "learning_rate": 5e-05, + "epoch": 0.7543062047768457, + "step": 11780 + }, + { + "loss": 2.1591, + "grad_norm": 1.5392520427703857, + "learning_rate": 5e-05, + "epoch": 0.7546263687007748, + "step": 11785 + }, + { + "loss": 2.1723, + "grad_norm": 1.702089548110962, + "learning_rate": 5e-05, + "epoch": 0.7549465326247039, + "step": 11790 + }, + { + "loss": 2.1572, + "grad_norm": 1.5574604272842407, + "learning_rate": 5e-05, + "epoch": 0.7552666965486329, + "step": 11795 + }, + { + "loss": 2.1696, + "grad_norm": 1.565976858139038, + "learning_rate": 5e-05, + "epoch": 0.755586860472562, + "step": 11800 + }, + { + "eval_loss": 2.033466339111328, + "eval_runtime": 9.2054, + "eval_samples_per_second": 222.478, + "eval_steps_per_second": 27.81, + "epoch": 0.755586860472562, + "step": 11800 + }, + { + "loss": 2.154, + "grad_norm": 1.5538219213485718, + "learning_rate": 5e-05, + "epoch": 0.755907024396491, + "step": 11805 + }, + { + "loss": 2.175, + "grad_norm": 1.6611093282699585, + "learning_rate": 5e-05, + "epoch": 0.75622718832042, + "step": 11810 + }, + { + "loss": 2.1702, + "grad_norm": 1.61954665184021, + "learning_rate": 5e-05, + "epoch": 0.7565473522443491, + "step": 11815 + }, + { + "loss": 2.1554, + "grad_norm": 1.5703262090682983, + "learning_rate": 5e-05, + "epoch": 0.7568675161682782, + "step": 11820 + }, + { + "loss": 2.1639, + "grad_norm": 1.5392646789550781, + "learning_rate": 5e-05, + "epoch": 0.7571876800922072, + "step": 11825 + }, + { + "loss": 2.1582, + "grad_norm": 1.6835620403289795, + "learning_rate": 5e-05, + "epoch": 0.7575078440161362, + "step": 11830 + }, + { + "loss": 2.1526, + "grad_norm": 1.6214685440063477, + "learning_rate": 5e-05, + "epoch": 0.7578280079400653, + "step": 11835 + }, + { + "loss": 2.1829, + "grad_norm": 1.613537311553955, + "learning_rate": 5e-05, + "epoch": 0.7581481718639944, + "step": 11840 + }, + { + "loss": 2.1452, + "grad_norm": 1.5517228841781616, + "learning_rate": 5e-05, + "epoch": 0.7584683357879234, + "step": 11845 + }, + { + "loss": 2.1803, + "grad_norm": 1.6012214422225952, + "learning_rate": 5e-05, + "epoch": 0.7587884997118525, + "step": 11850 + }, + { + "loss": 2.1673, + "grad_norm": 1.5846041440963745, + "learning_rate": 5e-05, + "epoch": 0.7591086636357816, + "step": 
11855 + }, + { + "loss": 2.1348, + "grad_norm": 1.588087558746338, + "learning_rate": 5e-05, + "epoch": 0.7594288275597105, + "step": 11860 + }, + { + "loss": 2.1479, + "grad_norm": 1.585292100906372, + "learning_rate": 5e-05, + "epoch": 0.7597489914836396, + "step": 11865 + }, + { + "loss": 2.1432, + "grad_norm": 1.5393071174621582, + "learning_rate": 5e-05, + "epoch": 0.7600691554075687, + "step": 11870 + }, + { + "loss": 2.1886, + "grad_norm": 1.5727068185806274, + "learning_rate": 5e-05, + "epoch": 0.7603893193314978, + "step": 11875 + }, + { + "loss": 2.177, + "grad_norm": 1.6157071590423584, + "learning_rate": 5e-05, + "epoch": 0.7607094832554268, + "step": 11880 + }, + { + "loss": 2.1629, + "grad_norm": 1.6175097227096558, + "learning_rate": 5e-05, + "epoch": 0.7610296471793558, + "step": 11885 + }, + { + "loss": 2.1848, + "grad_norm": 1.7654550075531006, + "learning_rate": 5e-05, + "epoch": 0.7613498111032849, + "step": 11890 + }, + { + "loss": 2.1423, + "grad_norm": 1.6491674184799194, + "learning_rate": 5e-05, + "epoch": 0.7616699750272139, + "step": 11895 + }, + { + "loss": 2.1446, + "grad_norm": 1.5910719633102417, + "learning_rate": 5e-05, + "epoch": 0.761990138951143, + "step": 11900 + }, + { + "loss": 2.1554, + "grad_norm": 1.5420751571655273, + "learning_rate": 5e-05, + "epoch": 0.7623103028750721, + "step": 11905 + }, + { + "loss": 2.1404, + "grad_norm": 1.6134288311004639, + "learning_rate": 5e-05, + "epoch": 0.762630466799001, + "step": 11910 + }, + { + "loss": 2.1559, + "grad_norm": 1.4922846555709839, + "learning_rate": 5e-05, + "epoch": 0.7629506307229301, + "step": 11915 + }, + { + "loss": 2.152, + "grad_norm": 1.5739011764526367, + "learning_rate": 5e-05, + "epoch": 0.7632707946468592, + "step": 11920 + }, + { + "loss": 2.1655, + "grad_norm": 1.6074965000152588, + "learning_rate": 5e-05, + "epoch": 0.7635909585707883, + "step": 11925 + }, + { + "loss": 2.1552, + "grad_norm": 1.5739712715148926, + "learning_rate": 5e-05, + "epoch": 0.7639111224947173, + "step": 11930 + }, + { + "loss": 2.1671, + "grad_norm": 1.595373272895813, + "learning_rate": 5e-05, + "epoch": 0.7642312864186463, + "step": 11935 + }, + { + "loss": 2.1588, + "grad_norm": 1.5630054473876953, + "learning_rate": 5e-05, + "epoch": 0.7645514503425754, + "step": 11940 + }, + { + "loss": 2.159, + "grad_norm": 1.6202868223190308, + "learning_rate": 5e-05, + "epoch": 0.7648716142665044, + "step": 11945 + }, + { + "loss": 2.2056, + "grad_norm": 1.671276330947876, + "learning_rate": 5e-05, + "epoch": 0.7651917781904335, + "step": 11950 + }, + { + "loss": 2.1376, + "grad_norm": 1.714341163635254, + "learning_rate": 5e-05, + "epoch": 0.7655119421143626, + "step": 11955 + }, + { + "loss": 2.165, + "grad_norm": 1.734993815422058, + "learning_rate": 5e-05, + "epoch": 0.7658321060382917, + "step": 11960 + }, + { + "loss": 2.1515, + "grad_norm": 1.7148008346557617, + "learning_rate": 5e-05, + "epoch": 0.7661522699622206, + "step": 11965 + }, + { + "loss": 2.1716, + "grad_norm": 1.6618858575820923, + "learning_rate": 5e-05, + "epoch": 0.7664724338861497, + "step": 11970 + }, + { + "loss": 2.1366, + "grad_norm": 1.603656530380249, + "learning_rate": 5e-05, + "epoch": 0.7667925978100788, + "step": 11975 + }, + { + "loss": 2.1583, + "grad_norm": 1.618535041809082, + "learning_rate": 5e-05, + "epoch": 0.7671127617340078, + "step": 11980 + }, + { + "loss": 2.14, + "grad_norm": 1.553106665611267, + "learning_rate": 5e-05, + "epoch": 0.7674329256579369, + "step": 11985 + }, + { + "loss": 2.1562, + "grad_norm": 
1.5241880416870117, + "learning_rate": 5e-05, + "epoch": 0.7677530895818659, + "step": 11990 + }, + { + "loss": 2.1738, + "grad_norm": 1.5153862237930298, + "learning_rate": 5e-05, + "epoch": 0.7680732535057949, + "step": 11995 + }, + { + "loss": 2.1916, + "grad_norm": 1.577313780784607, + "learning_rate": 5e-05, + "epoch": 0.768393417429724, + "step": 12000 + }, + { + "eval_loss": 2.0328667163848877, + "eval_runtime": 12.6548, + "eval_samples_per_second": 161.836, + "eval_steps_per_second": 20.23, + "epoch": 0.768393417429724, + "step": 12000 + }, + { + "loss": 2.1293, + "grad_norm": 1.6270264387130737, + "learning_rate": 5e-05, + "epoch": 0.7687135813536531, + "step": 12005 + }, + { + "loss": 2.1822, + "grad_norm": 1.679459571838379, + "learning_rate": 5e-05, + "epoch": 0.7690337452775822, + "step": 12010 + }, + { + "loss": 2.1355, + "grad_norm": 1.6691720485687256, + "learning_rate": 5e-05, + "epoch": 0.7693539092015111, + "step": 12015 + }, + { + "loss": 2.1708, + "grad_norm": 1.6165096759796143, + "learning_rate": 5e-05, + "epoch": 0.7696740731254402, + "step": 12020 + }, + { + "loss": 2.1629, + "grad_norm": 1.6087703704833984, + "learning_rate": 5e-05, + "epoch": 0.7699942370493693, + "step": 12025 + }, + { + "loss": 2.169, + "grad_norm": 1.5682085752487183, + "learning_rate": 5e-05, + "epoch": 0.7703144009732983, + "step": 12030 + }, + { + "loss": 2.1344, + "grad_norm": 1.5505659580230713, + "learning_rate": 5e-05, + "epoch": 0.7706345648972274, + "step": 12035 + }, + { + "loss": 2.1576, + "grad_norm": 1.5263081789016724, + "learning_rate": 5e-05, + "epoch": 0.7709547288211565, + "step": 12040 + }, + { + "loss": 2.1756, + "grad_norm": 1.700647234916687, + "learning_rate": 5e-05, + "epoch": 0.7712748927450855, + "step": 12045 + }, + { + "loss": 2.1783, + "grad_norm": 1.6649285554885864, + "learning_rate": 5e-05, + "epoch": 0.7715950566690145, + "step": 12050 + }, + { + "loss": 2.1548, + "grad_norm": 1.6890583038330078, + "learning_rate": 5e-05, + "epoch": 0.7719152205929436, + "step": 12055 + }, + { + "loss": 2.1775, + "grad_norm": 1.7615318298339844, + "learning_rate": 5e-05, + "epoch": 0.7722353845168727, + "step": 12060 + }, + { + "loss": 2.1643, + "grad_norm": 1.7467012405395508, + "learning_rate": 5e-05, + "epoch": 0.7725555484408017, + "step": 12065 + }, + { + "loss": 2.158, + "grad_norm": 1.6698206663131714, + "learning_rate": 5e-05, + "epoch": 0.7728757123647307, + "step": 12070 + }, + { + "loss": 2.1472, + "grad_norm": 1.592130422592163, + "learning_rate": 5e-05, + "epoch": 0.7731958762886598, + "step": 12075 + }, + { + "loss": 2.1408, + "grad_norm": 1.6456536054611206, + "learning_rate": 5e-05, + "epoch": 0.7735160402125888, + "step": 12080 + }, + { + "loss": 2.1562, + "grad_norm": 1.637117862701416, + "learning_rate": 5e-05, + "epoch": 0.7738362041365179, + "step": 12085 + }, + { + "loss": 2.1518, + "grad_norm": 1.6462478637695312, + "learning_rate": 5e-05, + "epoch": 0.774156368060447, + "step": 12090 + }, + { + "loss": 2.1468, + "grad_norm": 1.6634985208511353, + "learning_rate": 5e-05, + "epoch": 0.774476531984376, + "step": 12095 + }, + { + "loss": 2.1616, + "grad_norm": 1.602612853050232, + "learning_rate": 5e-05, + "epoch": 0.774796695908305, + "step": 12100 + }, + { + "loss": 2.141, + "grad_norm": 1.580141305923462, + "learning_rate": 5e-05, + "epoch": 0.7751168598322341, + "step": 12105 + }, + { + "loss": 2.1513, + "grad_norm": 1.6583237648010254, + "learning_rate": 5e-05, + "epoch": 0.7754370237561632, + "step": 12110 + }, + { + "loss": 2.1497, + "grad_norm": 
1.6343486309051514, + "learning_rate": 5e-05, + "epoch": 0.7757571876800922, + "step": 12115 + }, + { + "loss": 2.1309, + "grad_norm": 1.5499143600463867, + "learning_rate": 5e-05, + "epoch": 0.7760773516040212, + "step": 12120 + }, + { + "loss": 2.1546, + "grad_norm": 1.6126841306686401, + "learning_rate": 5e-05, + "epoch": 0.7763975155279503, + "step": 12125 + }, + { + "loss": 2.1419, + "grad_norm": 1.5695915222167969, + "learning_rate": 5e-05, + "epoch": 0.7767176794518794, + "step": 12130 + }, + { + "loss": 2.165, + "grad_norm": 1.7308309078216553, + "learning_rate": 5e-05, + "epoch": 0.7770378433758084, + "step": 12135 + }, + { + "loss": 2.1722, + "grad_norm": 1.6199977397918701, + "learning_rate": 5e-05, + "epoch": 0.7773580072997375, + "step": 12140 + }, + { + "loss": 2.1572, + "grad_norm": 1.6317228078842163, + "learning_rate": 5e-05, + "epoch": 0.7776781712236666, + "step": 12145 + }, + { + "loss": 2.163, + "grad_norm": 1.6066442728042603, + "learning_rate": 5e-05, + "epoch": 0.7779983351475955, + "step": 12150 + }, + { + "loss": 2.1893, + "grad_norm": 1.6254138946533203, + "learning_rate": 5e-05, + "epoch": 0.7783184990715246, + "step": 12155 + }, + { + "loss": 2.1504, + "grad_norm": 1.650006890296936, + "learning_rate": 5e-05, + "epoch": 0.7786386629954537, + "step": 12160 + }, + { + "loss": 2.1524, + "grad_norm": 1.605966567993164, + "learning_rate": 5e-05, + "epoch": 0.7789588269193827, + "step": 12165 + }, + { + "loss": 2.1521, + "grad_norm": 1.5684596300125122, + "learning_rate": 5e-05, + "epoch": 0.7792789908433118, + "step": 12170 + }, + { + "loss": 2.1581, + "grad_norm": 1.5677785873413086, + "learning_rate": 5e-05, + "epoch": 0.7795991547672408, + "step": 12175 + }, + { + "loss": 2.1275, + "grad_norm": 1.6513335704803467, + "learning_rate": 5e-05, + "epoch": 0.7799193186911699, + "step": 12180 + }, + { + "loss": 2.1446, + "grad_norm": 1.650525450706482, + "learning_rate": 5e-05, + "epoch": 0.7802394826150989, + "step": 12185 + }, + { + "loss": 2.152, + "grad_norm": 1.576680302619934, + "learning_rate": 5e-05, + "epoch": 0.780559646539028, + "step": 12190 + }, + { + "loss": 2.1527, + "grad_norm": 1.634871244430542, + "learning_rate": 5e-05, + "epoch": 0.7808798104629571, + "step": 12195 + }, + { + "loss": 2.1764, + "grad_norm": 1.6193811893463135, + "learning_rate": 5e-05, + "epoch": 0.781199974386886, + "step": 12200 + }, + { + "eval_loss": 2.030806303024292, + "eval_runtime": 10.0212, + "eval_samples_per_second": 204.366, + "eval_steps_per_second": 25.546, + "epoch": 0.781199974386886, + "step": 12200 + }, + { + "loss": 2.1441, + "grad_norm": 1.6034654378890991, + "learning_rate": 5e-05, + "epoch": 0.7815201383108151, + "step": 12205 + }, + { + "loss": 2.1311, + "grad_norm": 1.5852737426757812, + "learning_rate": 5e-05, + "epoch": 0.7818403022347442, + "step": 12210 + }, + { + "loss": 2.1637, + "grad_norm": 1.6145589351654053, + "learning_rate": 5e-05, + "epoch": 0.7821604661586733, + "step": 12215 + }, + { + "loss": 2.1387, + "grad_norm": 1.6558140516281128, + "learning_rate": 5e-05, + "epoch": 0.7824806300826023, + "step": 12220 + }, + { + "loss": 2.1663, + "grad_norm": 1.5966055393218994, + "learning_rate": 5e-05, + "epoch": 0.7828007940065314, + "step": 12225 + }, + { + "loss": 2.1405, + "grad_norm": 1.6753573417663574, + "learning_rate": 5e-05, + "epoch": 0.7831209579304604, + "step": 12230 + }, + { + "loss": 2.1673, + "grad_norm": 1.6698678731918335, + "learning_rate": 5e-05, + "epoch": 0.7834411218543894, + "step": 12235 + }, + { + "loss": 2.1487, + "grad_norm": 
1.6776996850967407, + "learning_rate": 5e-05, + "epoch": 0.7837612857783185, + "step": 12240 + }, + { + "loss": 2.1743, + "grad_norm": 1.6791410446166992, + "learning_rate": 5e-05, + "epoch": 0.7840814497022476, + "step": 12245 + }, + { + "loss": 2.2024, + "grad_norm": 1.6838544607162476, + "learning_rate": 5e-05, + "epoch": 0.7844016136261766, + "step": 12250 + }, + { + "loss": 2.1468, + "grad_norm": 1.572723627090454, + "learning_rate": 5e-05, + "epoch": 0.7847217775501056, + "step": 12255 + }, + { + "loss": 2.1581, + "grad_norm": 1.5989056825637817, + "learning_rate": 5e-05, + "epoch": 0.7850419414740347, + "step": 12260 + }, + { + "loss": 2.1342, + "grad_norm": 1.5742263793945312, + "learning_rate": 5e-05, + "epoch": 0.7853621053979638, + "step": 12265 + }, + { + "loss": 2.1455, + "grad_norm": 1.6334121227264404, + "learning_rate": 5e-05, + "epoch": 0.7856822693218928, + "step": 12270 + }, + { + "loss": 2.1602, + "grad_norm": 1.6674188375473022, + "learning_rate": 5e-05, + "epoch": 0.7860024332458219, + "step": 12275 + }, + { + "loss": 2.1542, + "grad_norm": 1.6382502317428589, + "learning_rate": 5e-05, + "epoch": 0.786322597169751, + "step": 12280 + }, + { + "loss": 2.1901, + "grad_norm": 1.642731785774231, + "learning_rate": 5e-05, + "epoch": 0.7866427610936799, + "step": 12285 + }, + { + "loss": 2.1802, + "grad_norm": 1.6009544134140015, + "learning_rate": 5e-05, + "epoch": 0.786962925017609, + "step": 12290 + }, + { + "loss": 2.1362, + "grad_norm": 1.54159414768219, + "learning_rate": 5e-05, + "epoch": 0.7872830889415381, + "step": 12295 + }, + { + "loss": 2.1792, + "grad_norm": 1.5700483322143555, + "learning_rate": 5e-05, + "epoch": 0.7876032528654672, + "step": 12300 + }, + { + "loss": 2.1434, + "grad_norm": 1.6987546682357788, + "learning_rate": 5e-05, + "epoch": 0.7879234167893961, + "step": 12305 + }, + { + "loss": 2.1475, + "grad_norm": 1.595435380935669, + "learning_rate": 5e-05, + "epoch": 0.7882435807133252, + "step": 12310 + }, + { + "loss": 2.1381, + "grad_norm": 1.5674047470092773, + "learning_rate": 5e-05, + "epoch": 0.7885637446372543, + "step": 12315 + }, + { + "loss": 2.186, + "grad_norm": 1.5283838510513306, + "learning_rate": 5e-05, + "epoch": 0.7888839085611833, + "step": 12320 + }, + { + "loss": 2.1748, + "grad_norm": 1.6678552627563477, + "learning_rate": 5e-05, + "epoch": 0.7892040724851124, + "step": 12325 + }, + { + "loss": 2.1484, + "grad_norm": 1.5990204811096191, + "learning_rate": 5e-05, + "epoch": 0.7895242364090415, + "step": 12330 + }, + { + "loss": 2.1156, + "grad_norm": 1.5516057014465332, + "learning_rate": 5e-05, + "epoch": 0.7898444003329704, + "step": 12335 + }, + { + "loss": 2.1274, + "grad_norm": 1.4775924682617188, + "learning_rate": 5e-05, + "epoch": 0.7901645642568995, + "step": 12340 + }, + { + "loss": 2.1675, + "grad_norm": 1.6825110912322998, + "learning_rate": 5e-05, + "epoch": 0.7904847281808286, + "step": 12345 + }, + { + "loss": 2.144, + "grad_norm": 1.600658655166626, + "learning_rate": 5e-05, + "epoch": 0.7908048921047577, + "step": 12350 + }, + { + "loss": 2.1516, + "grad_norm": 1.5016052722930908, + "learning_rate": 5e-05, + "epoch": 0.7911250560286867, + "step": 12355 + }, + { + "loss": 2.1524, + "grad_norm": 1.5548697710037231, + "learning_rate": 5e-05, + "epoch": 0.7914452199526157, + "step": 12360 + }, + { + "loss": 2.1653, + "grad_norm": 1.5483704805374146, + "learning_rate": 5e-05, + "epoch": 0.7917653838765448, + "step": 12365 + }, + { + "loss": 2.1624, + "grad_norm": 1.5267717838287354, + "learning_rate": 5e-05, + 
"epoch": 0.7920855478004738, + "step": 12370 + }, + { + "loss": 2.1488, + "grad_norm": 1.738539457321167, + "learning_rate": 5e-05, + "epoch": 0.7924057117244029, + "step": 12375 + }, + { + "loss": 2.1473, + "grad_norm": 1.6237080097198486, + "learning_rate": 5e-05, + "epoch": 0.792725875648332, + "step": 12380 + }, + { + "loss": 2.166, + "grad_norm": 1.6005204916000366, + "learning_rate": 5e-05, + "epoch": 0.793046039572261, + "step": 12385 + }, + { + "loss": 2.1519, + "grad_norm": 1.5341179370880127, + "learning_rate": 5e-05, + "epoch": 0.79336620349619, + "step": 12390 + }, + { + "loss": 2.1747, + "grad_norm": 1.566355586051941, + "learning_rate": 5e-05, + "epoch": 0.7936863674201191, + "step": 12395 + }, + { + "loss": 2.1942, + "grad_norm": 1.706238865852356, + "learning_rate": 5e-05, + "epoch": 0.7940065313440482, + "step": 12400 + }, + { + "eval_loss": 2.0258755683898926, + "eval_runtime": 11.8297, + "eval_samples_per_second": 173.124, + "eval_steps_per_second": 21.64, + "epoch": 0.7940065313440482, + "step": 12400 + }, + { + "loss": 2.1421, + "grad_norm": 1.6088368892669678, + "learning_rate": 5e-05, + "epoch": 0.7943266952679772, + "step": 12405 + }, + { + "loss": 2.1716, + "grad_norm": 1.6068791151046753, + "learning_rate": 5e-05, + "epoch": 0.7946468591919063, + "step": 12410 + }, + { + "loss": 2.1201, + "grad_norm": 1.5572909116744995, + "learning_rate": 5e-05, + "epoch": 0.7949670231158353, + "step": 12415 + }, + { + "loss": 2.1499, + "grad_norm": 1.5897774696350098, + "learning_rate": 5e-05, + "epoch": 0.7952871870397643, + "step": 12420 + }, + { + "loss": 2.1673, + "grad_norm": 1.5571075677871704, + "learning_rate": 5e-05, + "epoch": 0.7956073509636934, + "step": 12425 + }, + { + "loss": 2.1511, + "grad_norm": 1.6140809059143066, + "learning_rate": 5e-05, + "epoch": 0.7959275148876225, + "step": 12430 + }, + { + "loss": 2.1885, + "grad_norm": 1.743863821029663, + "learning_rate": 5e-05, + "epoch": 0.7962476788115516, + "step": 12435 + }, + { + "loss": 2.1563, + "grad_norm": 1.5255663394927979, + "learning_rate": 5e-05, + "epoch": 0.7965678427354805, + "step": 12440 + }, + { + "loss": 2.1199, + "grad_norm": 1.540199875831604, + "learning_rate": 5e-05, + "epoch": 0.7968880066594096, + "step": 12445 + }, + { + "loss": 2.1226, + "grad_norm": 1.5515542030334473, + "learning_rate": 5e-05, + "epoch": 0.7972081705833387, + "step": 12450 + }, + { + "loss": 2.1358, + "grad_norm": 1.60235595703125, + "learning_rate": 5e-05, + "epoch": 0.7975283345072677, + "step": 12455 + }, + { + "loss": 2.1982, + "grad_norm": 1.604455590248108, + "learning_rate": 5e-05, + "epoch": 0.7978484984311968, + "step": 12460 + }, + { + "loss": 2.148, + "grad_norm": 1.5921660661697388, + "learning_rate": 5e-05, + "epoch": 0.7981686623551258, + "step": 12465 + }, + { + "loss": 2.1461, + "grad_norm": 1.583845615386963, + "learning_rate": 5e-05, + "epoch": 0.7984888262790549, + "step": 12470 + }, + { + "loss": 2.1552, + "grad_norm": 1.599627137184143, + "learning_rate": 5e-05, + "epoch": 0.7988089902029839, + "step": 12475 + }, + { + "loss": 2.1764, + "grad_norm": 1.6735512018203735, + "learning_rate": 5e-05, + "epoch": 0.799129154126913, + "step": 12480 + }, + { + "loss": 2.1477, + "grad_norm": 1.571876049041748, + "learning_rate": 5e-05, + "epoch": 0.7994493180508421, + "step": 12485 + }, + { + "loss": 2.1418, + "grad_norm": 1.580018162727356, + "learning_rate": 5e-05, + "epoch": 0.799769481974771, + "step": 12490 + }, + { + "loss": 2.1611, + "grad_norm": 1.6157264709472656, + "learning_rate": 5e-05, + "epoch": 
0.8000896458987001, + "step": 12495 + }, + { + "loss": 2.1363, + "grad_norm": 1.5641580820083618, + "learning_rate": 5e-05, + "epoch": 0.8004098098226292, + "step": 12500 + }, + { + "loss": 2.1749, + "grad_norm": 1.6098390817642212, + "learning_rate": 5e-05, + "epoch": 0.8007299737465582, + "step": 12505 + }, + { + "loss": 2.1095, + "grad_norm": 1.5591392517089844, + "learning_rate": 5e-05, + "epoch": 0.8010501376704873, + "step": 12510 + }, + { + "loss": 2.1488, + "grad_norm": 1.5563864707946777, + "learning_rate": 5e-05, + "epoch": 0.8013703015944164, + "step": 12515 + }, + { + "loss": 2.1685, + "grad_norm": 1.564354419708252, + "learning_rate": 5e-05, + "epoch": 0.8016904655183454, + "step": 12520 + }, + { + "loss": 2.1467, + "grad_norm": 1.567794680595398, + "learning_rate": 5e-05, + "epoch": 0.8020106294422744, + "step": 12525 + }, + { + "loss": 2.1129, + "grad_norm": 1.5561848878860474, + "learning_rate": 5e-05, + "epoch": 0.8023307933662035, + "step": 12530 + }, + { + "loss": 2.1407, + "grad_norm": 1.5778709650039673, + "learning_rate": 5e-05, + "epoch": 0.8026509572901326, + "step": 12535 + }, + { + "loss": 2.1594, + "grad_norm": 1.5555260181427002, + "learning_rate": 5e-05, + "epoch": 0.8029711212140616, + "step": 12540 + }, + { + "loss": 2.1593, + "grad_norm": 1.5886701345443726, + "learning_rate": 5e-05, + "epoch": 0.8032912851379906, + "step": 12545 + }, + { + "loss": 2.1298, + "grad_norm": 1.5618678331375122, + "learning_rate": 5e-05, + "epoch": 0.8036114490619197, + "step": 12550 + }, + { + "loss": 2.1498, + "grad_norm": 1.7027949094772339, + "learning_rate": 5e-05, + "epoch": 0.8039316129858488, + "step": 12555 + }, + { + "loss": 2.1509, + "grad_norm": 1.625081181526184, + "learning_rate": 5e-05, + "epoch": 0.8042517769097778, + "step": 12560 + }, + { + "loss": 2.1231, + "grad_norm": 1.631125569343567, + "learning_rate": 5e-05, + "epoch": 0.8045719408337069, + "step": 12565 + }, + { + "loss": 2.165, + "grad_norm": 1.5852395296096802, + "learning_rate": 5e-05, + "epoch": 0.804892104757636, + "step": 12570 + }, + { + "loss": 2.1538, + "grad_norm": 1.7171579599380493, + "learning_rate": 5e-05, + "epoch": 0.8052122686815649, + "step": 12575 + }, + { + "loss": 2.1685, + "grad_norm": 1.660180687904358, + "learning_rate": 5e-05, + "epoch": 0.805532432605494, + "step": 12580 + }, + { + "loss": 2.1729, + "grad_norm": 1.589772343635559, + "learning_rate": 5e-05, + "epoch": 0.8058525965294231, + "step": 12585 + }, + { + "loss": 2.1709, + "grad_norm": 1.6301699876785278, + "learning_rate": 5e-05, + "epoch": 0.8061727604533521, + "step": 12590 + }, + { + "loss": 2.1447, + "grad_norm": 1.6717979907989502, + "learning_rate": 5e-05, + "epoch": 0.8064929243772812, + "step": 12595 + }, + { + "loss": 2.1402, + "grad_norm": 1.5903136730194092, + "learning_rate": 5e-05, + "epoch": 0.8068130883012102, + "step": 12600 + }, + { + "eval_loss": 2.0219223499298096, + "eval_runtime": 9.19, + "eval_samples_per_second": 222.851, + "eval_steps_per_second": 27.856, + "epoch": 0.8068130883012102, + "step": 12600 + }, + { + "loss": 2.1761, + "grad_norm": 1.5364222526550293, + "learning_rate": 5e-05, + "epoch": 0.8071332522251393, + "step": 12605 + }, + { + "loss": 2.1475, + "grad_norm": 1.5884590148925781, + "learning_rate": 5e-05, + "epoch": 0.8074534161490683, + "step": 12610 + }, + { + "loss": 2.1438, + "grad_norm": 1.6371623277664185, + "learning_rate": 5e-05, + "epoch": 0.8077735800729974, + "step": 12615 + }, + { + "loss": 2.1583, + "grad_norm": 1.5629013776779175, + "learning_rate": 5e-05, + "epoch": 
0.8080937439969265, + "step": 12620 + }, + { + "loss": 2.1505, + "grad_norm": 1.6306496858596802, + "learning_rate": 5e-05, + "epoch": 0.8084139079208554, + "step": 12625 + }, + { + "loss": 2.1618, + "grad_norm": 1.6803566217422485, + "learning_rate": 5e-05, + "epoch": 0.8087340718447845, + "step": 12630 + }, + { + "loss": 2.155, + "grad_norm": 1.5759366750717163, + "learning_rate": 5e-05, + "epoch": 0.8090542357687136, + "step": 12635 + }, + { + "loss": 2.1685, + "grad_norm": 1.5736042261123657, + "learning_rate": 5e-05, + "epoch": 0.8093743996926427, + "step": 12640 + }, + { + "loss": 2.1334, + "grad_norm": 1.639487862586975, + "learning_rate": 5e-05, + "epoch": 0.8096945636165717, + "step": 12645 + }, + { + "loss": 2.1347, + "grad_norm": 1.5598664283752441, + "learning_rate": 5e-05, + "epoch": 0.8100147275405007, + "step": 12650 + }, + { + "loss": 2.163, + "grad_norm": 1.6298346519470215, + "learning_rate": 5e-05, + "epoch": 0.8103348914644298, + "step": 12655 + }, + { + "loss": 2.1457, + "grad_norm": 1.593445062637329, + "learning_rate": 5e-05, + "epoch": 0.8106550553883588, + "step": 12660 + }, + { + "loss": 2.199, + "grad_norm": 1.5960693359375, + "learning_rate": 5e-05, + "epoch": 0.8109752193122879, + "step": 12665 + }, + { + "loss": 2.1398, + "grad_norm": 1.595033884048462, + "learning_rate": 5e-05, + "epoch": 0.811295383236217, + "step": 12670 + }, + { + "loss": 2.1474, + "grad_norm": 1.6295945644378662, + "learning_rate": 5e-05, + "epoch": 0.811615547160146, + "step": 12675 + }, + { + "loss": 2.1516, + "grad_norm": 1.6431686878204346, + "learning_rate": 5e-05, + "epoch": 0.811935711084075, + "step": 12680 + }, + { + "loss": 2.1303, + "grad_norm": 1.6344900131225586, + "learning_rate": 5e-05, + "epoch": 0.8122558750080041, + "step": 12685 + }, + { + "loss": 2.1446, + "grad_norm": 1.6356950998306274, + "learning_rate": 5e-05, + "epoch": 0.8125760389319332, + "step": 12690 + }, + { + "loss": 2.1369, + "grad_norm": 1.6093062162399292, + "learning_rate": 5e-05, + "epoch": 0.8128962028558622, + "step": 12695 + }, + { + "loss": 2.1676, + "grad_norm": 1.7104160785675049, + "learning_rate": 5e-05, + "epoch": 0.8132163667797913, + "step": 12700 + }, + { + "loss": 2.1573, + "grad_norm": 1.6704941987991333, + "learning_rate": 5e-05, + "epoch": 0.8135365307037203, + "step": 12705 + }, + { + "loss": 2.1416, + "grad_norm": 1.568360686302185, + "learning_rate": 5e-05, + "epoch": 0.8138566946276493, + "step": 12710 + }, + { + "loss": 2.1544, + "grad_norm": 1.5690499544143677, + "learning_rate": 5e-05, + "epoch": 0.8141768585515784, + "step": 12715 + }, + { + "loss": 2.1495, + "grad_norm": 1.6691917181015015, + "learning_rate": 5e-05, + "epoch": 0.8144970224755075, + "step": 12720 + }, + { + "loss": 2.1458, + "grad_norm": 1.5928118228912354, + "learning_rate": 5e-05, + "epoch": 0.8148171863994366, + "step": 12725 + }, + { + "loss": 2.1354, + "grad_norm": 1.573602557182312, + "learning_rate": 5e-05, + "epoch": 0.8151373503233655, + "step": 12730 + }, + { + "loss": 2.1671, + "grad_norm": 1.608485221862793, + "learning_rate": 5e-05, + "epoch": 0.8154575142472946, + "step": 12735 + }, + { + "loss": 2.1242, + "grad_norm": 1.6055208444595337, + "learning_rate": 5e-05, + "epoch": 0.8157776781712237, + "step": 12740 + }, + { + "loss": 2.1676, + "grad_norm": 1.4978792667388916, + "learning_rate": 5e-05, + "epoch": 0.8160978420951527, + "step": 12745 + }, + { + "loss": 2.1317, + "grad_norm": 1.5075442790985107, + "learning_rate": 5e-05, + "epoch": 0.8164180060190818, + "step": 12750 + }, + { + "loss": 
2.153, + "grad_norm": 1.5905604362487793, + "learning_rate": 5e-05, + "epoch": 0.8167381699430108, + "step": 12755 + }, + { + "loss": 2.1718, + "grad_norm": 1.6042873859405518, + "learning_rate": 5e-05, + "epoch": 0.8170583338669398, + "step": 12760 + }, + { + "loss": 2.1638, + "grad_norm": 1.515039086341858, + "learning_rate": 5e-05, + "epoch": 0.8173784977908689, + "step": 12765 + }, + { + "loss": 2.1604, + "grad_norm": 1.7022101879119873, + "learning_rate": 5e-05, + "epoch": 0.817698661714798, + "step": 12770 + }, + { + "loss": 2.1893, + "grad_norm": 1.5627135038375854, + "learning_rate": 5e-05, + "epoch": 0.8180188256387271, + "step": 12775 + }, + { + "loss": 2.1391, + "grad_norm": 1.6236927509307861, + "learning_rate": 5e-05, + "epoch": 0.818338989562656, + "step": 12780 + }, + { + "loss": 2.1661, + "grad_norm": 1.627759337425232, + "learning_rate": 5e-05, + "epoch": 0.8186591534865851, + "step": 12785 + }, + { + "loss": 2.1579, + "grad_norm": 1.5399779081344604, + "learning_rate": 5e-05, + "epoch": 0.8189793174105142, + "step": 12790 + }, + { + "loss": 2.1772, + "grad_norm": 1.5167617797851562, + "learning_rate": 5e-05, + "epoch": 0.8192994813344432, + "step": 12795 + }, + { + "loss": 2.1573, + "grad_norm": 1.5164883136749268, + "learning_rate": 5e-05, + "epoch": 0.8196196452583723, + "step": 12800 + }, + { + "eval_loss": 2.017529010772705, + "eval_runtime": 10.1774, + "eval_samples_per_second": 201.231, + "eval_steps_per_second": 25.154, + "epoch": 0.8196196452583723, + "step": 12800 + }, + { + "loss": 2.1762, + "grad_norm": 1.6219836473464966, + "learning_rate": 5e-05, + "epoch": 0.8199398091823014, + "step": 12805 + }, + { + "loss": 2.1406, + "grad_norm": 1.5230660438537598, + "learning_rate": 5e-05, + "epoch": 0.8202599731062304, + "step": 12810 + }, + { + "loss": 2.1179, + "grad_norm": 1.6092220544815063, + "learning_rate": 5e-05, + "epoch": 0.8205801370301594, + "step": 12815 + }, + { + "loss": 2.157, + "grad_norm": 1.5099693536758423, + "learning_rate": 5e-05, + "epoch": 0.8209003009540885, + "step": 12820 + }, + { + "loss": 2.1253, + "grad_norm": 1.5344569683074951, + "learning_rate": 5e-05, + "epoch": 0.8212204648780176, + "step": 12825 + }, + { + "loss": 2.1538, + "grad_norm": 1.6269365549087524, + "learning_rate": 5e-05, + "epoch": 0.8215406288019466, + "step": 12830 + }, + { + "loss": 2.1528, + "grad_norm": 1.5916081666946411, + "learning_rate": 5e-05, + "epoch": 0.8218607927258756, + "step": 12835 + }, + { + "loss": 2.1554, + "grad_norm": 1.5954616069793701, + "learning_rate": 5e-05, + "epoch": 0.8221809566498047, + "step": 12840 + }, + { + "loss": 2.1217, + "grad_norm": 1.5432181358337402, + "learning_rate": 5e-05, + "epoch": 0.8225011205737337, + "step": 12845 + }, + { + "loss": 2.1388, + "grad_norm": 1.579157829284668, + "learning_rate": 5e-05, + "epoch": 0.8228212844976628, + "step": 12850 + }, + { + "loss": 2.1628, + "grad_norm": 1.5736826658248901, + "learning_rate": 5e-05, + "epoch": 0.8231414484215919, + "step": 12855 + }, + { + "loss": 2.1405, + "grad_norm": 1.5886597633361816, + "learning_rate": 5e-05, + "epoch": 0.823461612345521, + "step": 12860 + }, + { + "loss": 2.1396, + "grad_norm": 1.6655110120773315, + "learning_rate": 5e-05, + "epoch": 0.8237817762694499, + "step": 12865 + }, + { + "loss": 2.157, + "grad_norm": 1.6713991165161133, + "learning_rate": 5e-05, + "epoch": 0.824101940193379, + "step": 12870 + }, + { + "loss": 2.1416, + "grad_norm": 1.5701757669448853, + "learning_rate": 5e-05, + "epoch": 0.8244221041173081, + "step": 12875 + }, + { + "loss": 
2.1355, + "grad_norm": 1.609489917755127, + "learning_rate": 5e-05, + "epoch": 0.8247422680412371, + "step": 12880 + }, + { + "loss": 2.1341, + "grad_norm": 1.637588620185852, + "learning_rate": 5e-05, + "epoch": 0.8250624319651662, + "step": 12885 + }, + { + "loss": 2.1274, + "grad_norm": 1.5560768842697144, + "learning_rate": 5e-05, + "epoch": 0.8253825958890952, + "step": 12890 + }, + { + "loss": 2.1296, + "grad_norm": 1.547018051147461, + "learning_rate": 5e-05, + "epoch": 0.8257027598130243, + "step": 12895 + }, + { + "loss": 2.1605, + "grad_norm": 1.576536774635315, + "learning_rate": 5e-05, + "epoch": 0.8260229237369533, + "step": 12900 + }, + { + "loss": 2.1281, + "grad_norm": 1.5521260499954224, + "learning_rate": 5e-05, + "epoch": 0.8263430876608824, + "step": 12905 + }, + { + "loss": 2.1792, + "grad_norm": 1.5745978355407715, + "learning_rate": 5e-05, + "epoch": 0.8266632515848115, + "step": 12910 + }, + { + "loss": 2.1682, + "grad_norm": 1.5464338064193726, + "learning_rate": 5e-05, + "epoch": 0.8269834155087404, + "step": 12915 + }, + { + "loss": 2.1368, + "grad_norm": 1.6071354150772095, + "learning_rate": 5e-05, + "epoch": 0.8273035794326695, + "step": 12920 + }, + { + "loss": 2.1642, + "grad_norm": 1.6548957824707031, + "learning_rate": 5e-05, + "epoch": 0.8276237433565986, + "step": 12925 + }, + { + "loss": 2.1636, + "grad_norm": 1.6200032234191895, + "learning_rate": 5e-05, + "epoch": 0.8279439072805276, + "step": 12930 + }, + { + "loss": 2.1587, + "grad_norm": 1.5933823585510254, + "learning_rate": 5e-05, + "epoch": 0.8282640712044567, + "step": 12935 + }, + { + "loss": 2.1459, + "grad_norm": 1.6406372785568237, + "learning_rate": 5e-05, + "epoch": 0.8285842351283857, + "step": 12940 + }, + { + "loss": 2.1455, + "grad_norm": 1.5688544511795044, + "learning_rate": 5e-05, + "epoch": 0.8289043990523148, + "step": 12945 + }, + { + "loss": 2.1704, + "grad_norm": 1.5574911832809448, + "learning_rate": 5e-05, + "epoch": 0.8292245629762438, + "step": 12950 + }, + { + "loss": 2.1366, + "grad_norm": 1.5636203289031982, + "learning_rate": 5e-05, + "epoch": 0.8295447269001729, + "step": 12955 + }, + { + "loss": 2.1216, + "grad_norm": 1.5622069835662842, + "learning_rate": 5e-05, + "epoch": 0.829864890824102, + "step": 12960 + }, + { + "loss": 2.1306, + "grad_norm": 1.5027891397476196, + "learning_rate": 5e-05, + "epoch": 0.830185054748031, + "step": 12965 + }, + { + "loss": 2.1621, + "grad_norm": 1.565948486328125, + "learning_rate": 5e-05, + "epoch": 0.83050521867196, + "step": 12970 + }, + { + "loss": 2.1489, + "grad_norm": 1.7101725339889526, + "learning_rate": 5e-05, + "epoch": 0.8308253825958891, + "step": 12975 + }, + { + "loss": 2.1543, + "grad_norm": 1.6061644554138184, + "learning_rate": 5e-05, + "epoch": 0.8311455465198182, + "step": 12980 + }, + { + "loss": 2.1695, + "grad_norm": 1.5707058906555176, + "learning_rate": 5e-05, + "epoch": 0.8314657104437472, + "step": 12985 + }, + { + "loss": 2.1433, + "grad_norm": 1.5309937000274658, + "learning_rate": 5e-05, + "epoch": 0.8317858743676763, + "step": 12990 + }, + { + "loss": 2.1318, + "grad_norm": 1.538222074508667, + "learning_rate": 5e-05, + "epoch": 0.8321060382916053, + "step": 12995 + }, + { + "loss": 2.1446, + "grad_norm": 1.738500714302063, + "learning_rate": 5e-05, + "epoch": 0.8324262022155343, + "step": 13000 + }, + { + "eval_loss": 2.0216612815856934, + "eval_runtime": 9.2438, + "eval_samples_per_second": 221.554, + "eval_steps_per_second": 27.694, + "epoch": 0.8324262022155343, + "step": 13000 + }, + { + "loss": 
2.1179, + "grad_norm": 1.625812292098999, + "learning_rate": 5e-05, + "epoch": 0.8327463661394634, + "step": 13005 + }, + { + "loss": 2.1423, + "grad_norm": 1.795238733291626, + "learning_rate": 5e-05, + "epoch": 0.8330665300633925, + "step": 13010 + }, + { + "loss": 2.1532, + "grad_norm": 1.6200240850448608, + "learning_rate": 5e-05, + "epoch": 0.8333866939873215, + "step": 13015 + }, + { + "loss": 2.1766, + "grad_norm": 1.5638982057571411, + "learning_rate": 5e-05, + "epoch": 0.8337068579112505, + "step": 13020 + }, + { + "loss": 2.1575, + "grad_norm": 1.6555575132369995, + "learning_rate": 5e-05, + "epoch": 0.8340270218351796, + "step": 13025 + }, + { + "loss": 2.1378, + "grad_norm": 1.560592532157898, + "learning_rate": 5e-05, + "epoch": 0.8343471857591087, + "step": 13030 + }, + { + "loss": 2.144, + "grad_norm": 1.5742806196212769, + "learning_rate": 5e-05, + "epoch": 0.8346673496830377, + "step": 13035 + }, + { + "loss": 2.1508, + "grad_norm": 1.6514239311218262, + "learning_rate": 5e-05, + "epoch": 0.8349875136069668, + "step": 13040 + }, + { + "loss": 2.1589, + "grad_norm": 1.5992977619171143, + "learning_rate": 5e-05, + "epoch": 0.8353076775308959, + "step": 13045 + }, + { + "loss": 2.1343, + "grad_norm": 1.783820629119873, + "learning_rate": 5e-05, + "epoch": 0.8356278414548248, + "step": 13050 + }, + { + "loss": 2.1822, + "grad_norm": 1.6316050291061401, + "learning_rate": 5e-05, + "epoch": 0.8359480053787539, + "step": 13055 + }, + { + "loss": 2.1543, + "grad_norm": 1.723607063293457, + "learning_rate": 5e-05, + "epoch": 0.836268169302683, + "step": 13060 + }, + { + "loss": 2.1511, + "grad_norm": 1.5759859085083008, + "learning_rate": 5e-05, + "epoch": 0.8365883332266121, + "step": 13065 + }, + { + "loss": 2.162, + "grad_norm": 1.5511044263839722, + "learning_rate": 5e-05, + "epoch": 0.836908497150541, + "step": 13070 + }, + { + "loss": 2.1452, + "grad_norm": 1.5764498710632324, + "learning_rate": 5e-05, + "epoch": 0.8372286610744701, + "step": 13075 + }, + { + "loss": 2.1494, + "grad_norm": 1.6332306861877441, + "learning_rate": 5e-05, + "epoch": 0.8375488249983992, + "step": 13080 + }, + { + "loss": 2.1495, + "grad_norm": 1.5492876768112183, + "learning_rate": 5e-05, + "epoch": 0.8378689889223282, + "step": 13085 + }, + { + "loss": 2.1547, + "grad_norm": 1.6958086490631104, + "learning_rate": 5e-05, + "epoch": 0.8381891528462573, + "step": 13090 + }, + { + "loss": 2.1328, + "grad_norm": 1.6327171325683594, + "learning_rate": 5e-05, + "epoch": 0.8385093167701864, + "step": 13095 + }, + { + "loss": 2.1196, + "grad_norm": 1.4885859489440918, + "learning_rate": 5e-05, + "epoch": 0.8388294806941153, + "step": 13100 + }, + { + "loss": 2.1367, + "grad_norm": 1.5167481899261475, + "learning_rate": 5e-05, + "epoch": 0.8391496446180444, + "step": 13105 + }, + { + "loss": 2.1373, + "grad_norm": 1.5418943166732788, + "learning_rate": 5e-05, + "epoch": 0.8394698085419735, + "step": 13110 + }, + { + "loss": 2.1341, + "grad_norm": 1.5980948209762573, + "learning_rate": 5e-05, + "epoch": 0.8397899724659026, + "step": 13115 + }, + { + "loss": 2.1283, + "grad_norm": 1.5357916355133057, + "learning_rate": 5e-05, + "epoch": 0.8401101363898316, + "step": 13120 + }, + { + "loss": 2.1357, + "grad_norm": 1.6638524532318115, + "learning_rate": 5e-05, + "epoch": 0.8404303003137606, + "step": 13125 + }, + { + "loss": 2.1465, + "grad_norm": 1.622886061668396, + "learning_rate": 5e-05, + "epoch": 0.8407504642376897, + "step": 13130 + }, + { + "loss": 2.1373, + "grad_norm": 1.618874430656433, + 
"learning_rate": 5e-05, + "epoch": 0.8410706281616187, + "step": 13135 + }, + { + "loss": 2.1567, + "grad_norm": 1.6146790981292725, + "learning_rate": 5e-05, + "epoch": 0.8413907920855478, + "step": 13140 + }, + { + "loss": 2.1464, + "grad_norm": 1.5791168212890625, + "learning_rate": 5e-05, + "epoch": 0.8417109560094769, + "step": 13145 + }, + { + "loss": 2.158, + "grad_norm": 1.506654143333435, + "learning_rate": 5e-05, + "epoch": 0.842031119933406, + "step": 13150 + }, + { + "loss": 2.1604, + "grad_norm": 1.5425060987472534, + "learning_rate": 5e-05, + "epoch": 0.8423512838573349, + "step": 13155 + }, + { + "loss": 2.156, + "grad_norm": 1.5820544958114624, + "learning_rate": 5e-05, + "epoch": 0.842671447781264, + "step": 13160 + }, + { + "loss": 2.136, + "grad_norm": 1.6569517850875854, + "learning_rate": 5e-05, + "epoch": 0.8429916117051931, + "step": 13165 + }, + { + "loss": 2.135, + "grad_norm": 1.7146352529525757, + "learning_rate": 5e-05, + "epoch": 0.8433117756291221, + "step": 13170 + }, + { + "loss": 2.1443, + "grad_norm": 1.6757546663284302, + "learning_rate": 5e-05, + "epoch": 0.8436319395530512, + "step": 13175 + }, + { + "loss": 2.1559, + "grad_norm": 1.5445369482040405, + "learning_rate": 5e-05, + "epoch": 0.8439521034769802, + "step": 13180 + }, + { + "loss": 2.1461, + "grad_norm": 1.6682908535003662, + "learning_rate": 5e-05, + "epoch": 0.8442722674009092, + "step": 13185 + }, + { + "loss": 2.1599, + "grad_norm": 1.5197023153305054, + "learning_rate": 5e-05, + "epoch": 0.8445924313248383, + "step": 13190 + }, + { + "loss": 2.1202, + "grad_norm": 1.605768084526062, + "learning_rate": 5e-05, + "epoch": 0.8449125952487674, + "step": 13195 + }, + { + "loss": 2.1589, + "grad_norm": 1.7030861377716064, + "learning_rate": 5e-05, + "epoch": 0.8452327591726965, + "step": 13200 + }, + { + "eval_loss": 2.025972843170166, + "eval_runtime": 8.9924, + "eval_samples_per_second": 227.748, + "eval_steps_per_second": 28.469, + "epoch": 0.8452327591726965, + "step": 13200 + }, + { + "loss": 2.1577, + "grad_norm": 1.5597788095474243, + "learning_rate": 5e-05, + "epoch": 0.8455529230966254, + "step": 13205 + }, + { + "loss": 2.1244, + "grad_norm": 1.5069013833999634, + "learning_rate": 5e-05, + "epoch": 0.8458730870205545, + "step": 13210 + }, + { + "loss": 2.1457, + "grad_norm": 1.4982860088348389, + "learning_rate": 5e-05, + "epoch": 0.8461932509444836, + "step": 13215 + }, + { + "loss": 2.1392, + "grad_norm": 1.5416525602340698, + "learning_rate": 5e-05, + "epoch": 0.8465134148684126, + "step": 13220 + }, + { + "loss": 2.1341, + "grad_norm": 1.5496692657470703, + "learning_rate": 5e-05, + "epoch": 0.8468335787923417, + "step": 13225 + }, + { + "loss": 2.1419, + "grad_norm": 1.55703604221344, + "learning_rate": 5e-05, + "epoch": 0.8471537427162708, + "step": 13230 + }, + { + "loss": 2.1623, + "grad_norm": 1.5972099304199219, + "learning_rate": 5e-05, + "epoch": 0.8474739066401998, + "step": 13235 + }, + { + "loss": 2.15, + "grad_norm": 1.569321870803833, + "learning_rate": 5e-05, + "epoch": 0.8477940705641288, + "step": 13240 + }, + { + "loss": 2.1483, + "grad_norm": 1.549758791923523, + "learning_rate": 5e-05, + "epoch": 0.8481142344880579, + "step": 13245 + }, + { + "loss": 2.156, + "grad_norm": 1.6335357427597046, + "learning_rate": 5e-05, + "epoch": 0.848434398411987, + "step": 13250 + }, + { + "loss": 2.1435, + "grad_norm": 1.6042168140411377, + "learning_rate": 5e-05, + "epoch": 0.848754562335916, + "step": 13255 + }, + { + "loss": 2.1489, + "grad_norm": 1.5949348211288452, + 
"learning_rate": 5e-05, + "epoch": 0.849074726259845, + "step": 13260 + }, + { + "loss": 2.1586, + "grad_norm": 1.5524771213531494, + "learning_rate": 5e-05, + "epoch": 0.8493948901837741, + "step": 13265 + }, + { + "loss": 2.1251, + "grad_norm": 1.6360365152359009, + "learning_rate": 5e-05, + "epoch": 0.8497150541077031, + "step": 13270 + }, + { + "loss": 2.1508, + "grad_norm": 1.500728964805603, + "learning_rate": 5e-05, + "epoch": 0.8500352180316322, + "step": 13275 + }, + { + "loss": 2.1457, + "grad_norm": 1.6618624925613403, + "learning_rate": 5e-05, + "epoch": 0.8503553819555613, + "step": 13280 + }, + { + "loss": 2.1543, + "grad_norm": 1.5445833206176758, + "learning_rate": 5e-05, + "epoch": 0.8506755458794903, + "step": 13285 + }, + { + "loss": 2.1769, + "grad_norm": 1.5497047901153564, + "learning_rate": 5e-05, + "epoch": 0.8509957098034193, + "step": 13290 + }, + { + "loss": 2.1584, + "grad_norm": 1.6006513833999634, + "learning_rate": 5e-05, + "epoch": 0.8513158737273484, + "step": 13295 + }, + { + "loss": 2.1348, + "grad_norm": 1.671229600906372, + "learning_rate": 5e-05, + "epoch": 0.8516360376512775, + "step": 13300 + }, + { + "loss": 2.1527, + "grad_norm": 1.5722646713256836, + "learning_rate": 5e-05, + "epoch": 0.8519562015752065, + "step": 13305 + }, + { + "loss": 2.156, + "grad_norm": 1.5288598537445068, + "learning_rate": 5e-05, + "epoch": 0.8522763654991355, + "step": 13310 + }, + { + "loss": 2.1362, + "grad_norm": 1.6392875909805298, + "learning_rate": 5e-05, + "epoch": 0.8525965294230646, + "step": 13315 + }, + { + "loss": 2.1416, + "grad_norm": 1.5657522678375244, + "learning_rate": 5e-05, + "epoch": 0.8529166933469937, + "step": 13320 + }, + { + "loss": 2.1231, + "grad_norm": 1.6025290489196777, + "learning_rate": 5e-05, + "epoch": 0.8532368572709227, + "step": 13325 + }, + { + "loss": 2.1308, + "grad_norm": 1.575181245803833, + "learning_rate": 5e-05, + "epoch": 0.8535570211948518, + "step": 13330 + }, + { + "loss": 2.1447, + "grad_norm": 1.7142658233642578, + "learning_rate": 5e-05, + "epoch": 0.8538771851187809, + "step": 13335 + }, + { + "loss": 2.1016, + "grad_norm": 1.573530912399292, + "learning_rate": 5e-05, + "epoch": 0.8541973490427098, + "step": 13340 + }, + { + "loss": 2.1634, + "grad_norm": 1.64275324344635, + "learning_rate": 5e-05, + "epoch": 0.8545175129666389, + "step": 13345 + }, + { + "loss": 2.1234, + "grad_norm": 1.6251364946365356, + "learning_rate": 5e-05, + "epoch": 0.854837676890568, + "step": 13350 + }, + { + "loss": 2.1461, + "grad_norm": 1.5351976156234741, + "learning_rate": 5e-05, + "epoch": 0.855157840814497, + "step": 13355 + }, + { + "loss": 2.1326, + "grad_norm": 1.557486891746521, + "learning_rate": 5e-05, + "epoch": 0.8554780047384261, + "step": 13360 + }, + { + "loss": 2.1336, + "grad_norm": 1.638458013534546, + "learning_rate": 5e-05, + "epoch": 0.8557981686623551, + "step": 13365 + }, + { + "loss": 2.1435, + "grad_norm": 1.5943306684494019, + "learning_rate": 5e-05, + "epoch": 0.8561183325862842, + "step": 13370 + }, + { + "loss": 2.1446, + "grad_norm": 1.5555797815322876, + "learning_rate": 5e-05, + "epoch": 0.8564384965102132, + "step": 13375 + }, + { + "loss": 2.1498, + "grad_norm": 1.5393874645233154, + "learning_rate": 5e-05, + "epoch": 0.8567586604341423, + "step": 13380 + }, + { + "loss": 2.1332, + "grad_norm": 1.7171132564544678, + "learning_rate": 5e-05, + "epoch": 0.8570788243580714, + "step": 13385 + }, + { + "loss": 2.1282, + "grad_norm": 1.6407911777496338, + "learning_rate": 5e-05, + "epoch": 0.8573989882820003, + 
"step": 13390 + }, + { + "loss": 2.1315, + "grad_norm": 1.6605337858200073, + "learning_rate": 5e-05, + "epoch": 0.8577191522059294, + "step": 13395 + }, + { + "loss": 2.129, + "grad_norm": 1.655444860458374, + "learning_rate": 5e-05, + "epoch": 0.8580393161298585, + "step": 13400 + }, + { + "eval_loss": 2.0151822566986084, + "eval_runtime": 12.5576, + "eval_samples_per_second": 163.088, + "eval_steps_per_second": 20.386, + "epoch": 0.8580393161298585, + "step": 13400 + }, + { + "loss": 2.1229, + "grad_norm": 1.6577417850494385, + "learning_rate": 5e-05, + "epoch": 0.8583594800537876, + "step": 13405 + }, + { + "loss": 2.1458, + "grad_norm": 1.5821425914764404, + "learning_rate": 5e-05, + "epoch": 0.8586796439777166, + "step": 13410 + }, + { + "loss": 2.1436, + "grad_norm": 1.529543161392212, + "learning_rate": 5e-05, + "epoch": 0.8589998079016457, + "step": 13415 + }, + { + "loss": 2.1617, + "grad_norm": 1.5528123378753662, + "learning_rate": 5e-05, + "epoch": 0.8593199718255747, + "step": 13420 + }, + { + "loss": 2.1315, + "grad_norm": 1.5557422637939453, + "learning_rate": 5e-05, + "epoch": 0.8596401357495037, + "step": 13425 + }, + { + "loss": 2.1229, + "grad_norm": 1.7199292182922363, + "learning_rate": 5e-05, + "epoch": 0.8599602996734328, + "step": 13430 + }, + { + "loss": 2.1343, + "grad_norm": 1.5561965703964233, + "learning_rate": 5e-05, + "epoch": 0.8602804635973619, + "step": 13435 + }, + { + "loss": 2.1587, + "grad_norm": 1.6795167922973633, + "learning_rate": 5e-05, + "epoch": 0.8606006275212909, + "step": 13440 + }, + { + "loss": 2.1469, + "grad_norm": 1.5269910097122192, + "learning_rate": 5e-05, + "epoch": 0.8609207914452199, + "step": 13445 + }, + { + "loss": 2.1506, + "grad_norm": 1.5669056177139282, + "learning_rate": 5e-05, + "epoch": 0.861240955369149, + "step": 13450 + }, + { + "loss": 2.1485, + "grad_norm": 1.6965998411178589, + "learning_rate": 5e-05, + "epoch": 0.8615611192930781, + "step": 13455 + }, + { + "loss": 2.1213, + "grad_norm": 1.7064182758331299, + "learning_rate": 5e-05, + "epoch": 0.8618812832170071, + "step": 13460 + }, + { + "loss": 2.1397, + "grad_norm": 1.587528944015503, + "learning_rate": 5e-05, + "epoch": 0.8622014471409362, + "step": 13465 + }, + { + "loss": 2.1396, + "grad_norm": 1.616944670677185, + "learning_rate": 5e-05, + "epoch": 0.8625216110648652, + "step": 13470 + }, + { + "loss": 2.1513, + "grad_norm": 1.6129498481750488, + "learning_rate": 5e-05, + "epoch": 0.8628417749887942, + "step": 13475 + }, + { + "loss": 2.1434, + "grad_norm": 1.612741231918335, + "learning_rate": 5e-05, + "epoch": 0.8631619389127233, + "step": 13480 + }, + { + "loss": 2.1399, + "grad_norm": 1.6746602058410645, + "learning_rate": 5e-05, + "epoch": 0.8634821028366524, + "step": 13485 + }, + { + "loss": 2.1382, + "grad_norm": 1.6259815692901611, + "learning_rate": 5e-05, + "epoch": 0.8638022667605815, + "step": 13490 + }, + { + "loss": 2.1307, + "grad_norm": 1.5910407304763794, + "learning_rate": 5e-05, + "epoch": 0.8641224306845104, + "step": 13495 + }, + { + "loss": 2.1576, + "grad_norm": 1.6779932975769043, + "learning_rate": 5e-05, + "epoch": 0.8644425946084395, + "step": 13500 + }, + { + "loss": 2.1316, + "grad_norm": 1.5591740608215332, + "learning_rate": 5e-05, + "epoch": 0.8647627585323686, + "step": 13505 + }, + { + "loss": 2.1338, + "grad_norm": 1.5584458112716675, + "learning_rate": 5e-05, + "epoch": 0.8650829224562976, + "step": 13510 + }, + { + "loss": 2.147, + "grad_norm": 1.5230597257614136, + "learning_rate": 5e-05, + "epoch": 
0.8654030863802267, + "step": 13515 + }, + { + "loss": 2.1538, + "grad_norm": 1.5795865058898926, + "learning_rate": 5e-05, + "epoch": 0.8657232503041558, + "step": 13520 + }, + { + "loss": 2.1305, + "grad_norm": 1.5907702445983887, + "learning_rate": 5e-05, + "epoch": 0.8660434142280847, + "step": 13525 + }, + { + "loss": 2.1522, + "grad_norm": 1.5211584568023682, + "learning_rate": 5e-05, + "epoch": 0.8663635781520138, + "step": 13530 + }, + { + "loss": 2.1708, + "grad_norm": 1.5619218349456787, + "learning_rate": 5e-05, + "epoch": 0.8666837420759429, + "step": 13535 + }, + { + "loss": 2.121, + "grad_norm": 1.7085723876953125, + "learning_rate": 5e-05, + "epoch": 0.867003905999872, + "step": 13540 + }, + { + "loss": 2.1772, + "grad_norm": 1.5624220371246338, + "learning_rate": 5e-05, + "epoch": 0.867324069923801, + "step": 13545 + }, + { + "loss": 2.1555, + "grad_norm": 1.5626081228256226, + "learning_rate": 5e-05, + "epoch": 0.86764423384773, + "step": 13550 + }, + { + "loss": 2.1631, + "grad_norm": 1.6283838748931885, + "learning_rate": 5e-05, + "epoch": 0.8679643977716591, + "step": 13555 + }, + { + "loss": 2.1582, + "grad_norm": 1.5527262687683105, + "learning_rate": 5e-05, + "epoch": 0.8682845616955881, + "step": 13560 + }, + { + "loss": 2.1323, + "grad_norm": 1.5154231786727905, + "learning_rate": 5e-05, + "epoch": 0.8686047256195172, + "step": 13565 + }, + { + "loss": 2.1506, + "grad_norm": 1.5625056028366089, + "learning_rate": 5e-05, + "epoch": 0.8689248895434463, + "step": 13570 + }, + { + "loss": 2.1419, + "grad_norm": 1.569746494293213, + "learning_rate": 5e-05, + "epoch": 0.8692450534673754, + "step": 13575 + }, + { + "loss": 2.1709, + "grad_norm": 1.5288923978805542, + "learning_rate": 5e-05, + "epoch": 0.8695652173913043, + "step": 13580 + }, + { + "loss": 2.149, + "grad_norm": 1.5755292177200317, + "learning_rate": 5e-05, + "epoch": 0.8698853813152334, + "step": 13585 + }, + { + "loss": 2.1528, + "grad_norm": 1.5749847888946533, + "learning_rate": 5e-05, + "epoch": 0.8702055452391625, + "step": 13590 + }, + { + "loss": 2.1523, + "grad_norm": 1.5499904155731201, + "learning_rate": 5e-05, + "epoch": 0.8705257091630915, + "step": 13595 + }, + { + "loss": 2.1692, + "grad_norm": 1.6534972190856934, + "learning_rate": 5e-05, + "epoch": 0.8708458730870205, + "step": 13600 + }, + { + "eval_loss": 2.002166748046875, + "eval_runtime": 9.601, + "eval_samples_per_second": 213.311, + "eval_steps_per_second": 26.664, + "epoch": 0.8708458730870205, + "step": 13600 + }, + { + "loss": 2.1695, + "grad_norm": 1.6838092803955078, + "learning_rate": 5e-05, + "epoch": 0.8711660370109496, + "step": 13605 + }, + { + "loss": 2.1425, + "grad_norm": 1.5831983089447021, + "learning_rate": 5e-05, + "epoch": 0.8714862009348786, + "step": 13610 + }, + { + "loss": 2.1434, + "grad_norm": 1.620985746383667, + "learning_rate": 5e-05, + "epoch": 0.8718063648588077, + "step": 13615 + }, + { + "loss": 2.1263, + "grad_norm": 1.5088939666748047, + "learning_rate": 5e-05, + "epoch": 0.8721265287827368, + "step": 13620 + }, + { + "loss": 2.1572, + "grad_norm": 1.5807956457138062, + "learning_rate": 5e-05, + "epoch": 0.8724466927066659, + "step": 13625 + }, + { + "loss": 2.1683, + "grad_norm": 1.6206260919570923, + "learning_rate": 5e-05, + "epoch": 0.8727668566305948, + "step": 13630 + }, + { + "loss": 2.1358, + "grad_norm": 1.5690107345581055, + "learning_rate": 5e-05, + "epoch": 0.8730870205545239, + "step": 13635 + }, + { + "loss": 2.1412, + "grad_norm": 1.515356183052063, + "learning_rate": 5e-05, + "epoch": 
0.873407184478453, + "step": 13640 + }, + { + "loss": 2.1233, + "grad_norm": 1.603214979171753, + "learning_rate": 5e-05, + "epoch": 0.873727348402382, + "step": 13645 + }, + { + "loss": 2.1411, + "grad_norm": 1.5593687295913696, + "learning_rate": 5e-05, + "epoch": 0.8740475123263111, + "step": 13650 + }, + { + "loss": 2.1334, + "grad_norm": 1.5858949422836304, + "learning_rate": 5e-05, + "epoch": 0.8743676762502401, + "step": 13655 + }, + { + "loss": 2.1357, + "grad_norm": 1.5124623775482178, + "learning_rate": 5e-05, + "epoch": 0.8746878401741692, + "step": 13660 + }, + { + "loss": 2.1547, + "grad_norm": 1.6396076679229736, + "learning_rate": 5e-05, + "epoch": 0.8750080040980982, + "step": 13665 + }, + { + "loss": 2.1258, + "grad_norm": 1.53853440284729, + "learning_rate": 5e-05, + "epoch": 0.8753281680220273, + "step": 13670 + }, + { + "loss": 2.1557, + "grad_norm": 1.5771784782409668, + "learning_rate": 5e-05, + "epoch": 0.8756483319459564, + "step": 13675 + }, + { + "loss": 2.1493, + "grad_norm": 1.5845115184783936, + "learning_rate": 5e-05, + "epoch": 0.8759684958698853, + "step": 13680 + }, + { + "loss": 2.1492, + "grad_norm": 1.4543273448944092, + "learning_rate": 5e-05, + "epoch": 0.8762886597938144, + "step": 13685 + }, + { + "loss": 2.1413, + "grad_norm": 1.622414469718933, + "learning_rate": 5e-05, + "epoch": 0.8766088237177435, + "step": 13690 + }, + { + "loss": 2.1324, + "grad_norm": 1.5697697401046753, + "learning_rate": 5e-05, + "epoch": 0.8769289876416725, + "step": 13695 + }, + { + "loss": 2.138, + "grad_norm": 1.5314058065414429, + "learning_rate": 5e-05, + "epoch": 0.8772491515656016, + "step": 13700 + }, + { + "loss": 2.1417, + "grad_norm": 1.576690673828125, + "learning_rate": 5e-05, + "epoch": 0.8775693154895307, + "step": 13705 + }, + { + "loss": 2.1656, + "grad_norm": 1.596118450164795, + "learning_rate": 5e-05, + "epoch": 0.8778894794134597, + "step": 13710 + }, + { + "loss": 2.1618, + "grad_norm": 1.6418870687484741, + "learning_rate": 5e-05, + "epoch": 0.8782096433373887, + "step": 13715 + }, + { + "loss": 2.1169, + "grad_norm": 1.5763425827026367, + "learning_rate": 5e-05, + "epoch": 0.8785298072613178, + "step": 13720 + }, + { + "loss": 2.1293, + "grad_norm": 1.6006461381912231, + "learning_rate": 5e-05, + "epoch": 0.8788499711852469, + "step": 13725 + }, + { + "loss": 2.1461, + "grad_norm": 1.6010429859161377, + "learning_rate": 5e-05, + "epoch": 0.8791701351091759, + "step": 13730 + }, + { + "loss": 2.1372, + "grad_norm": 1.5433728694915771, + "learning_rate": 5e-05, + "epoch": 0.8794902990331049, + "step": 13735 + }, + { + "loss": 2.1334, + "grad_norm": 1.6275326013565063, + "learning_rate": 5e-05, + "epoch": 0.879810462957034, + "step": 13740 + }, + { + "loss": 2.1458, + "grad_norm": 1.6575015783309937, + "learning_rate": 5e-05, + "epoch": 0.8801306268809631, + "step": 13745 + }, + { + "loss": 2.1217, + "grad_norm": 1.591966152191162, + "learning_rate": 5e-05, + "epoch": 0.8804507908048921, + "step": 13750 + }, + { + "loss": 2.1051, + "grad_norm": 1.569419503211975, + "learning_rate": 5e-05, + "epoch": 0.8807709547288212, + "step": 13755 + }, + { + "loss": 2.1461, + "grad_norm": 1.5138541460037231, + "learning_rate": 5e-05, + "epoch": 0.8810911186527502, + "step": 13760 + }, + { + "loss": 2.1666, + "grad_norm": 1.5544683933258057, + "learning_rate": 5e-05, + "epoch": 0.8814112825766792, + "step": 13765 + }, + { + "loss": 2.1572, + "grad_norm": 1.6272661685943604, + "learning_rate": 5e-05, + "epoch": 0.8817314465006083, + "step": 13770 + }, + { + "loss": 
2.1285, + "grad_norm": 1.59254789352417, + "learning_rate": 5e-05, + "epoch": 0.8820516104245374, + "step": 13775 + }, + { + "loss": 2.132, + "grad_norm": 1.5939842462539673, + "learning_rate": 5e-05, + "epoch": 0.8823717743484664, + "step": 13780 + }, + { + "loss": 2.1404, + "grad_norm": 1.512245774269104, + "learning_rate": 5e-05, + "epoch": 0.8826919382723954, + "step": 13785 + }, + { + "loss": 2.1267, + "grad_norm": 1.4971529245376587, + "learning_rate": 5e-05, + "epoch": 0.8830121021963245, + "step": 13790 + }, + { + "loss": 2.1288, + "grad_norm": 1.554547667503357, + "learning_rate": 5e-05, + "epoch": 0.8833322661202536, + "step": 13795 + }, + { + "loss": 2.124, + "grad_norm": 1.5361340045928955, + "learning_rate": 5e-05, + "epoch": 0.8836524300441826, + "step": 13800 + }, + { + "eval_loss": 2.003274917602539, + "eval_runtime": 9.0968, + "eval_samples_per_second": 225.134, + "eval_steps_per_second": 28.142, + "epoch": 0.8836524300441826, + "step": 13800 + }, + { + "loss": 2.1452, + "grad_norm": 1.6627310514450073, + "learning_rate": 5e-05, + "epoch": 0.8839725939681117, + "step": 13805 + }, + { + "loss": 2.1283, + "grad_norm": 1.6165390014648438, + "learning_rate": 5e-05, + "epoch": 0.8842927578920408, + "step": 13810 + }, + { + "loss": 2.1567, + "grad_norm": 1.6117839813232422, + "learning_rate": 5e-05, + "epoch": 0.8846129218159697, + "step": 13815 + }, + { + "loss": 2.1299, + "grad_norm": 1.6143478155136108, + "learning_rate": 5e-05, + "epoch": 0.8849330857398988, + "step": 13820 + }, + { + "loss": 2.1314, + "grad_norm": 1.5183966159820557, + "learning_rate": 5e-05, + "epoch": 0.8852532496638279, + "step": 13825 + }, + { + "loss": 2.1788, + "grad_norm": 1.5318809747695923, + "learning_rate": 5e-05, + "epoch": 0.885573413587757, + "step": 13830 + }, + { + "loss": 2.1037, + "grad_norm": 1.6234662532806396, + "learning_rate": 5e-05, + "epoch": 0.885893577511686, + "step": 13835 + }, + { + "loss": 2.1407, + "grad_norm": 1.6068276166915894, + "learning_rate": 5e-05, + "epoch": 0.886213741435615, + "step": 13840 + }, + { + "loss": 2.1296, + "grad_norm": 1.6118892431259155, + "learning_rate": 5e-05, + "epoch": 0.8865339053595441, + "step": 13845 + }, + { + "loss": 2.1491, + "grad_norm": 1.5626516342163086, + "learning_rate": 5e-05, + "epoch": 0.8868540692834731, + "step": 13850 + }, + { + "loss": 2.1442, + "grad_norm": 1.560748815536499, + "learning_rate": 5e-05, + "epoch": 0.8871742332074022, + "step": 13855 + }, + { + "loss": 2.1449, + "grad_norm": 1.6533335447311401, + "learning_rate": 5e-05, + "epoch": 0.8874943971313313, + "step": 13860 + }, + { + "loss": 2.1301, + "grad_norm": 1.671207070350647, + "learning_rate": 5e-05, + "epoch": 0.8878145610552602, + "step": 13865 + }, + { + "loss": 2.1246, + "grad_norm": 1.6996030807495117, + "learning_rate": 5e-05, + "epoch": 0.8881347249791893, + "step": 13870 + }, + { + "loss": 2.1249, + "grad_norm": 1.563635230064392, + "learning_rate": 5e-05, + "epoch": 0.8884548889031184, + "step": 13875 + }, + { + "loss": 2.1062, + "grad_norm": 1.5765130519866943, + "learning_rate": 5e-05, + "epoch": 0.8887750528270475, + "step": 13880 + }, + { + "loss": 2.1372, + "grad_norm": 1.61628258228302, + "learning_rate": 5e-05, + "epoch": 0.8890952167509765, + "step": 13885 + }, + { + "loss": 2.1548, + "grad_norm": 1.6101058721542358, + "learning_rate": 5e-05, + "epoch": 0.8894153806749056, + "step": 13890 + }, + { + "loss": 2.1294, + "grad_norm": 1.6023646593093872, + "learning_rate": 5e-05, + "epoch": 0.8897355445988346, + "step": 13895 + }, + { + "loss": 
2.1555, + "grad_norm": 1.5916895866394043, + "learning_rate": 5e-05, + "epoch": 0.8900557085227636, + "step": 13900 + }, + { + "loss": 2.1373, + "grad_norm": 1.6304248571395874, + "learning_rate": 5e-05, + "epoch": 0.8903758724466927, + "step": 13905 + }, + { + "loss": 2.1543, + "grad_norm": 1.578668475151062, + "learning_rate": 5e-05, + "epoch": 0.8906960363706218, + "step": 13910 + }, + { + "loss": 2.1368, + "grad_norm": 1.7462356090545654, + "learning_rate": 5e-05, + "epoch": 0.8910162002945509, + "step": 13915 + }, + { + "loss": 2.126, + "grad_norm": 1.5746142864227295, + "learning_rate": 5e-05, + "epoch": 0.8913363642184798, + "step": 13920 + }, + { + "loss": 2.1295, + "grad_norm": 1.598989486694336, + "learning_rate": 5e-05, + "epoch": 0.8916565281424089, + "step": 13925 + }, + { + "loss": 2.1271, + "grad_norm": 1.582812786102295, + "learning_rate": 5e-05, + "epoch": 0.891976692066338, + "step": 13930 + }, + { + "loss": 2.1282, + "grad_norm": 1.6400415897369385, + "learning_rate": 5e-05, + "epoch": 0.892296855990267, + "step": 13935 + }, + { + "loss": 2.1327, + "grad_norm": 1.5493074655532837, + "learning_rate": 5e-05, + "epoch": 0.8926170199141961, + "step": 13940 + }, + { + "loss": 2.1679, + "grad_norm": 1.6458313465118408, + "learning_rate": 5e-05, + "epoch": 0.8929371838381251, + "step": 13945 + }, + { + "loss": 2.1539, + "grad_norm": 1.5993520021438599, + "learning_rate": 5e-05, + "epoch": 0.8932573477620541, + "step": 13950 + }, + { + "loss": 2.1376, + "grad_norm": 1.5654582977294922, + "learning_rate": 5e-05, + "epoch": 0.8935775116859832, + "step": 13955 + }, + { + "loss": 2.1552, + "grad_norm": 1.5213743448257446, + "learning_rate": 5e-05, + "epoch": 0.8938976756099123, + "step": 13960 + }, + { + "loss": 2.1289, + "grad_norm": 1.5369318723678589, + "learning_rate": 5e-05, + "epoch": 0.8942178395338414, + "step": 13965 + }, + { + "loss": 2.1416, + "grad_norm": 1.6027822494506836, + "learning_rate": 5e-05, + "epoch": 0.8945380034577703, + "step": 13970 + }, + { + "loss": 2.1395, + "grad_norm": 1.5644904375076294, + "learning_rate": 5e-05, + "epoch": 0.8948581673816994, + "step": 13975 + }, + { + "loss": 2.1543, + "grad_norm": 1.6486353874206543, + "learning_rate": 5e-05, + "epoch": 0.8951783313056285, + "step": 13980 + }, + { + "loss": 2.1565, + "grad_norm": 1.6289517879486084, + "learning_rate": 5e-05, + "epoch": 0.8954984952295575, + "step": 13985 + }, + { + "loss": 2.1493, + "grad_norm": 1.700780987739563, + "learning_rate": 5e-05, + "epoch": 0.8958186591534866, + "step": 13990 + }, + { + "loss": 2.1413, + "grad_norm": 1.6621733903884888, + "learning_rate": 5e-05, + "epoch": 0.8961388230774157, + "step": 13995 + }, + { + "loss": 2.1535, + "grad_norm": 1.5872445106506348, + "learning_rate": 5e-05, + "epoch": 0.8964589870013447, + "step": 14000 + }, + { + "eval_loss": 2.003190517425537, + "eval_runtime": 9.0419, + "eval_samples_per_second": 226.5, + "eval_steps_per_second": 28.313, + "epoch": 0.8964589870013447, + "step": 14000 + }, + { + "loss": 2.1151, + "grad_norm": 1.5972435474395752, + "learning_rate": 5e-05, + "epoch": 0.8967791509252737, + "step": 14005 + }, + { + "loss": 2.1639, + "grad_norm": 1.5537108182907104, + "learning_rate": 5e-05, + "epoch": 0.8970993148492028, + "step": 14010 + }, + { + "loss": 2.1425, + "grad_norm": 1.5643787384033203, + "learning_rate": 5e-05, + "epoch": 0.8974194787731319, + "step": 14015 + }, + { + "loss": 2.1339, + "grad_norm": 1.5768754482269287, + "learning_rate": 5e-05, + "epoch": 0.8977396426970609, + "step": 14020 + }, + { + "loss": 
2.1362, + "grad_norm": 1.6462067365646362, + "learning_rate": 5e-05, + "epoch": 0.8980598066209899, + "step": 14025 + }, + { + "loss": 2.1413, + "grad_norm": 1.540134072303772, + "learning_rate": 5e-05, + "epoch": 0.898379970544919, + "step": 14030 + }, + { + "loss": 2.1417, + "grad_norm": 1.5940558910369873, + "learning_rate": 5e-05, + "epoch": 0.898700134468848, + "step": 14035 + }, + { + "loss": 2.1372, + "grad_norm": 1.5724999904632568, + "learning_rate": 5e-05, + "epoch": 0.8990202983927771, + "step": 14040 + }, + { + "loss": 2.1628, + "grad_norm": 1.611562728881836, + "learning_rate": 5e-05, + "epoch": 0.8993404623167062, + "step": 14045 + }, + { + "loss": 2.1253, + "grad_norm": 1.5111039876937866, + "learning_rate": 5e-05, + "epoch": 0.8996606262406353, + "step": 14050 + }, + { + "loss": 2.17, + "grad_norm": 1.5524070262908936, + "learning_rate": 5e-05, + "epoch": 0.8999807901645642, + "step": 14055 + }, + { + "loss": 2.1206, + "grad_norm": 1.5320378541946411, + "learning_rate": 4.999919096867105e-05, + "epoch": 0.9003009540884933, + "step": 14060 + }, + { + "loss": 2.1422, + "grad_norm": 1.5573452711105347, + "learning_rate": 4.99959043686394e-05, + "epoch": 0.9006211180124224, + "step": 14065 + }, + { + "loss": 2.1634, + "grad_norm": 1.5653362274169922, + "learning_rate": 4.999008996756062e-05, + "epoch": 0.9009412819363514, + "step": 14070 + }, + { + "loss": 2.129, + "grad_norm": 1.6500377655029297, + "learning_rate": 4.998174835343699e-05, + "epoch": 0.9012614458602805, + "step": 14075 + }, + { + "loss": 2.1201, + "grad_norm": 1.5795848369598389, + "learning_rate": 4.9970880369844344e-05, + "epoch": 0.9015816097842095, + "step": 14080 + }, + { + "loss": 2.1409, + "grad_norm": 1.48246431350708, + "learning_rate": 4.995748711584676e-05, + "epoch": 0.9019017737081386, + "step": 14085 + }, + { + "loss": 2.1532, + "grad_norm": 1.6394940614700317, + "learning_rate": 4.9941569945885383e-05, + "epoch": 0.9022219376320676, + "step": 14090 + }, + { + "loss": 2.1489, + "grad_norm": 1.6183077096939087, + "learning_rate": 4.992313046964147e-05, + "epoch": 0.9025421015559967, + "step": 14095 + }, + { + "loss": 2.1459, + "grad_norm": 1.5216248035430908, + "learning_rate": 4.990217055187362e-05, + "epoch": 0.9028622654799258, + "step": 14100 + }, + { + "loss": 2.1391, + "grad_norm": 1.5191268920898438, + "learning_rate": 4.987869231222917e-05, + "epoch": 0.9031824294038547, + "step": 14105 + }, + { + "loss": 2.1464, + "grad_norm": 1.555911660194397, + "learning_rate": 4.985269812502983e-05, + "epoch": 0.9035025933277838, + "step": 14110 + }, + { + "loss": 2.1511, + "grad_norm": 1.577181339263916, + "learning_rate": 4.9824190619031616e-05, + "epoch": 0.9038227572517129, + "step": 14115 + }, + { + "loss": 2.1753, + "grad_norm": 1.580073595046997, + "learning_rate": 4.979317267715895e-05, + "epoch": 0.9041429211756419, + "step": 14120 + }, + { + "loss": 2.1283, + "grad_norm": 1.5809470415115356, + "learning_rate": 4.975964743621318e-05, + "epoch": 0.904463085099571, + "step": 14125 + }, + { + "loss": 2.1146, + "grad_norm": 1.4928776025772095, + "learning_rate": 4.972361828655526e-05, + "epoch": 0.9047832490235, + "step": 14130 + }, + { + "loss": 2.1686, + "grad_norm": 1.5114343166351318, + "learning_rate": 4.968508887176303e-05, + "epoch": 0.9051034129474291, + "step": 14135 + }, + { + "loss": 2.1386, + "grad_norm": 1.6162500381469727, + "learning_rate": 4.964406308826261e-05, + "epoch": 0.9054235768713581, + "step": 14140 + }, + { + "loss": 2.1504, + "grad_norm": 1.7190195322036743, + 
"learning_rate": 4.960054508493442e-05, + "epoch": 0.9057437407952872, + "step": 14145 + }, + { + "loss": 2.0969, + "grad_norm": 1.6564409732818604, + "learning_rate": 4.9554539262693636e-05, + "epoch": 0.9060639047192163, + "step": 14150 + }, + { + "loss": 2.1156, + "grad_norm": 1.5606110095977783, + "learning_rate": 4.9506050274045076e-05, + "epoch": 0.9063840686431452, + "step": 14155 + }, + { + "loss": 2.133, + "grad_norm": 1.4608800411224365, + "learning_rate": 4.945508302261271e-05, + "epoch": 0.9067042325670743, + "step": 14160 + }, + { + "loss": 2.113, + "grad_norm": 1.5593559741973877, + "learning_rate": 4.940164266264382e-05, + "epoch": 0.9070243964910034, + "step": 14165 + }, + { + "loss": 2.1405, + "grad_norm": 1.5246703624725342, + "learning_rate": 4.934573459848768e-05, + "epoch": 0.9073445604149325, + "step": 14170 + }, + { + "loss": 2.1462, + "grad_norm": 1.6130108833312988, + "learning_rate": 4.928736448404907e-05, + "epoch": 0.9076647243388615, + "step": 14175 + }, + { + "loss": 2.1721, + "grad_norm": 1.5642423629760742, + "learning_rate": 4.9226538222216476e-05, + "epoch": 0.9079848882627906, + "step": 14180 + }, + { + "loss": 2.1491, + "grad_norm": 1.6237996816635132, + "learning_rate": 4.9163261964265184e-05, + "epoch": 0.9083050521867196, + "step": 14185 + }, + { + "loss": 2.1562, + "grad_norm": 1.5854694843292236, + "learning_rate": 4.909754210923515e-05, + "epoch": 0.9086252161106486, + "step": 14190 + }, + { + "loss": 2.1569, + "grad_norm": 1.578338861465454, + "learning_rate": 4.902938530328393e-05, + "epoch": 0.9089453800345777, + "step": 14195 + }, + { + "loss": 2.119, + "grad_norm": 1.6499290466308594, + "learning_rate": 4.895879843901451e-05, + "epoch": 0.9092655439585068, + "step": 14200 + }, + { + "eval_loss": 1.9983034133911133, + "eval_runtime": 9.4635, + "eval_samples_per_second": 216.41, + "eval_steps_per_second": 27.051, + "epoch": 0.9092655439585068, + "step": 14200 + }, + { + "loss": 2.1602, + "grad_norm": 1.5854569673538208, + "learning_rate": 4.888578865477831e-05, + "epoch": 0.9095857078824358, + "step": 14205 + }, + { + "loss": 2.1274, + "grad_norm": 1.5863550901412964, + "learning_rate": 4.881036333395329e-05, + "epoch": 0.9099058718063648, + "step": 14210 + }, + { + "loss": 2.1412, + "grad_norm": 1.5058035850524902, + "learning_rate": 4.873253010419724e-05, + "epoch": 0.9102260357302939, + "step": 14215 + }, + { + "loss": 2.133, + "grad_norm": 1.5665518045425415, + "learning_rate": 4.8652296836676435e-05, + "epoch": 0.910546199654223, + "step": 14220 + }, + { + "loss": 2.1322, + "grad_norm": 1.5273241996765137, + "learning_rate": 4.856967164526966e-05, + "epoch": 0.910866363578152, + "step": 14225 + }, + { + "loss": 2.1584, + "grad_norm": 2.971830129623413, + "learning_rate": 4.84846628857476e-05, + "epoch": 0.9111865275020811, + "step": 14230 + }, + { + "loss": 2.1475, + "grad_norm": 1.5898001194000244, + "learning_rate": 4.83972791549279e-05, + "epoch": 0.9115066914260102, + "step": 14235 + }, + { + "loss": 2.1378, + "grad_norm": 1.674157977104187, + "learning_rate": 4.8307529289805706e-05, + "epoch": 0.9118268553499391, + "step": 14240 + }, + { + "loss": 2.1262, + "grad_norm": 1.5460021495819092, + "learning_rate": 4.821542236666009e-05, + "epoch": 0.9121470192738682, + "step": 14245 + }, + { + "loss": 2.1465, + "grad_norm": 1.5996989011764526, + "learning_rate": 4.812096770013609e-05, + "epoch": 0.9124671831977973, + "step": 14250 + }, + { + "loss": 2.1211, + "grad_norm": 1.5312010049819946, + "learning_rate": 4.802417484230277e-05, + 
"epoch": 0.9127873471217264, + "step": 14255 + }, + { + "loss": 2.1254, + "grad_norm": 1.5802662372589111, + "learning_rate": 4.792505358168723e-05, + "epoch": 0.9131075110456554, + "step": 14260 + }, + { + "loss": 2.1521, + "grad_norm": 1.5600544214248657, + "learning_rate": 4.782361394228472e-05, + "epoch": 0.9134276749695844, + "step": 14265 + }, + { + "loss": 2.1041, + "grad_norm": 1.5304535627365112, + "learning_rate": 4.7719866182544894e-05, + "epoch": 0.9137478388935135, + "step": 14270 + }, + { + "loss": 2.1418, + "grad_norm": 1.540677785873413, + "learning_rate": 4.761382079433441e-05, + "epoch": 0.9140680028174425, + "step": 14275 + }, + { + "loss": 2.1354, + "grad_norm": 1.5304155349731445, + "learning_rate": 4.7505488501875907e-05, + "epoch": 0.9143881667413716, + "step": 14280 + }, + { + "loss": 2.1853, + "grad_norm": 1.581351637840271, + "learning_rate": 4.739488026066347e-05, + "epoch": 0.9147083306653007, + "step": 14285 + }, + { + "loss": 2.1387, + "grad_norm": 1.5637761354446411, + "learning_rate": 4.728200725635469e-05, + "epoch": 0.9150284945892296, + "step": 14290 + }, + { + "loss": 2.1256, + "grad_norm": 1.5060229301452637, + "learning_rate": 4.716688090363953e-05, + "epoch": 0.9153486585131587, + "step": 14295 + }, + { + "loss": 2.1226, + "grad_norm": 1.548572301864624, + "learning_rate": 4.7049512845085954e-05, + "epoch": 0.9156688224370878, + "step": 14300 + }, + { + "loss": 2.1307, + "grad_norm": 1.6175365447998047, + "learning_rate": 4.692991494996247e-05, + "epoch": 0.9159889863610169, + "step": 14305 + }, + { + "loss": 2.1273, + "grad_norm": 1.5475963354110718, + "learning_rate": 4.680809931303792e-05, + "epoch": 0.9163091502849459, + "step": 14310 + }, + { + "loss": 2.159, + "grad_norm": 1.644508719444275, + "learning_rate": 4.668407825335823e-05, + "epoch": 0.916629314208875, + "step": 14315 + }, + { + "loss": 2.1625, + "grad_norm": 1.582242727279663, + "learning_rate": 4.6557864313000695e-05, + "epoch": 0.916949478132804, + "step": 14320 + }, + { + "loss": 2.1283, + "grad_norm": 1.6576043367385864, + "learning_rate": 4.642947025580559e-05, + "epoch": 0.917269642056733, + "step": 14325 + }, + { + "loss": 2.1409, + "grad_norm": 1.511075735092163, + "learning_rate": 4.629890906608536e-05, + "epoch": 0.9175898059806621, + "step": 14330 + }, + { + "loss": 2.1281, + "grad_norm": 1.5506491661071777, + "learning_rate": 4.6166193947311544e-05, + "epoch": 0.9179099699045912, + "step": 14335 + }, + { + "loss": 2.1398, + "grad_norm": 1.5191504955291748, + "learning_rate": 4.6031338320779534e-05, + "epoch": 0.9182301338285203, + "step": 14340 + }, + { + "loss": 2.1247, + "grad_norm": 1.588230848312378, + "learning_rate": 4.589435582425131e-05, + "epoch": 0.9185502977524492, + "step": 14345 + }, + { + "loss": 2.1537, + "grad_norm": 1.5594102144241333, + "learning_rate": 4.5755260310576234e-05, + "epoch": 0.9188704616763783, + "step": 14350 + }, + { + "loss": 2.1339, + "grad_norm": 1.5035042762756348, + "learning_rate": 4.561406584629018e-05, + "epoch": 0.9191906256003074, + "step": 14355 + }, + { + "loss": 2.1301, + "grad_norm": 1.648313283920288, + "learning_rate": 4.547078671019294e-05, + "epoch": 0.9195107895242364, + "step": 14360 + }, + { + "loss": 2.1223, + "grad_norm": 1.5859969854354858, + "learning_rate": 4.5325437391904316e-05, + "epoch": 0.9198309534481655, + "step": 14365 + }, + { + "loss": 2.1238, + "grad_norm": 1.5037835836410522, + "learning_rate": 4.51780325903987e-05, + "epoch": 0.9201511173720945, + "step": 14370 + }, + { + "loss": 2.1529, + "grad_norm": 
1.5427237749099731, + "learning_rate": 4.5028587212518705e-05, + "epoch": 0.9204712812960235, + "step": 14375 + }, + { + "loss": 2.1016, + "grad_norm": 1.4729586839675903, + "learning_rate": 4.487711637146754e-05, + "epoch": 0.9207914452199526, + "step": 14380 + }, + { + "loss": 2.1257, + "grad_norm": 1.5223755836486816, + "learning_rate": 4.4723635385280724e-05, + "epoch": 0.9211116091438817, + "step": 14385 + }, + { + "loss": 2.1473, + "grad_norm": 1.5194729566574097, + "learning_rate": 4.456815977527694e-05, + "epoch": 0.9214317730678108, + "step": 14390 + }, + { + "loss": 2.141, + "grad_norm": 1.5059860944747925, + "learning_rate": 4.4410705264488415e-05, + "epoch": 0.9217519369917397, + "step": 14395 + }, + { + "loss": 2.1138, + "grad_norm": 1.543553113937378, + "learning_rate": 4.425128777607084e-05, + "epoch": 0.9220721009156688, + "step": 14400 + }, + { + "eval_loss": 2.00039005279541, + "eval_runtime": 13.1104, + "eval_samples_per_second": 156.212, + "eval_steps_per_second": 19.526, + "epoch": 0.9220721009156688, + "step": 14400 + }, + { + "loss": 2.1468, + "grad_norm": 1.5583864450454712, + "learning_rate": 4.4089923431693136e-05, + "epoch": 0.9223922648395979, + "step": 14405 + }, + { + "loss": 2.1244, + "grad_norm": 1.5649158954620361, + "learning_rate": 4.392662854990702e-05, + "epoch": 0.9227124287635269, + "step": 14410 + }, + { + "loss": 2.1579, + "grad_norm": 1.5519460439682007, + "learning_rate": 4.376141964449681e-05, + "epoch": 0.923032592687456, + "step": 14415 + }, + { + "loss": 2.1185, + "grad_norm": 1.6381224393844604, + "learning_rate": 4.359431342280935e-05, + "epoch": 0.923352756611385, + "step": 14420 + }, + { + "loss": 2.1166, + "grad_norm": 1.5199573040008545, + "learning_rate": 4.342532678406444e-05, + "epoch": 0.9236729205353141, + "step": 14425 + }, + { + "loss": 2.1598, + "grad_norm": 1.5303763151168823, + "learning_rate": 4.325447681764586e-05, + "epoch": 0.9239930844592431, + "step": 14430 + }, + { + "loss": 2.1426, + "grad_norm": 1.6057510375976562, + "learning_rate": 4.3081780801373104e-05, + "epoch": 0.9243132483831722, + "step": 14435 + }, + { + "loss": 2.0851, + "grad_norm": 1.566265344619751, + "learning_rate": 4.290725619975413e-05, + "epoch": 0.9246334123071013, + "step": 14440 + }, + { + "loss": 2.118, + "grad_norm": 1.5874476432800293, + "learning_rate": 4.27309206622192e-05, + "epoch": 0.9249535762310303, + "step": 14445 + }, + { + "loss": 2.1075, + "grad_norm": 1.531272530555725, + "learning_rate": 4.255279202133598e-05, + "epoch": 0.9252737401549593, + "step": 14450 + }, + { + "loss": 2.1417, + "grad_norm": 1.5390820503234863, + "learning_rate": 4.237288829100622e-05, + "epoch": 0.9255939040788884, + "step": 14455 + }, + { + "loss": 2.1287, + "grad_norm": 1.5704469680786133, + "learning_rate": 4.219122766464396e-05, + "epoch": 0.9259140680028174, + "step": 14460 + }, + { + "loss": 2.1114, + "grad_norm": 1.4786990880966187, + "learning_rate": 4.200782851333571e-05, + "epoch": 0.9262342319267465, + "step": 14465 + }, + { + "loss": 2.1369, + "grad_norm": 1.518187403678894, + "learning_rate": 4.1822709383982607e-05, + "epoch": 0.9265543958506756, + "step": 14470 + }, + { + "loss": 2.1398, + "grad_norm": 1.5425680875778198, + "learning_rate": 4.163588899742474e-05, + "epoch": 0.9268745597746046, + "step": 14475 + }, + { + "loss": 2.1393, + "grad_norm": 1.5290255546569824, + "learning_rate": 4.1447386246547995e-05, + "epoch": 0.9271947236985336, + "step": 14480 + }, + { + "loss": 2.1433, + "grad_norm": 1.6013634204864502, + "learning_rate": 
4.1257220194373424e-05, + "epoch": 0.9275148876224627, + "step": 14485 + }, + { + "loss": 2.1047, + "grad_norm": 1.5584988594055176, + "learning_rate": 4.106541007212942e-05, + "epoch": 0.9278350515463918, + "step": 14490 + }, + { + "loss": 2.1444, + "grad_norm": 1.5811387300491333, + "learning_rate": 4.0871975277306894e-05, + "epoch": 0.9281552154703208, + "step": 14495 + }, + { + "loss": 2.1093, + "grad_norm": 1.5312178134918213, + "learning_rate": 4.067693537169764e-05, + "epoch": 0.9284753793942498, + "step": 14500 + }, + { + "loss": 2.1307, + "grad_norm": 1.6398024559020996, + "learning_rate": 4.048031007941607e-05, + "epoch": 0.9287955433181789, + "step": 14505 + }, + { + "loss": 2.1452, + "grad_norm": 1.535327434539795, + "learning_rate": 4.028211928490454e-05, + "epoch": 0.929115707242108, + "step": 14510 + }, + { + "loss": 2.1035, + "grad_norm": 1.545807123184204, + "learning_rate": 4.008238303092249e-05, + "epoch": 0.929435871166037, + "step": 14515 + }, + { + "loss": 2.1376, + "grad_norm": 1.4855430126190186, + "learning_rate": 3.98811215165195e-05, + "epoch": 0.9297560350899661, + "step": 14520 + }, + { + "loss": 2.1208, + "grad_norm": 1.5296348333358765, + "learning_rate": 3.9678355094992644e-05, + "epoch": 0.9300761990138952, + "step": 14525 + }, + { + "loss": 2.1218, + "grad_norm": 1.5518286228179932, + "learning_rate": 3.9474104271828126e-05, + "epoch": 0.9303963629378241, + "step": 14530 + }, + { + "loss": 2.1161, + "grad_norm": 1.6967196464538574, + "learning_rate": 3.926838970262765e-05, + "epoch": 0.9307165268617532, + "step": 14535 + }, + { + "loss": 2.124, + "grad_norm": 1.6289931535720825, + "learning_rate": 3.906123219101952e-05, + "epoch": 0.9310366907856823, + "step": 14540 + }, + { + "loss": 2.1255, + "grad_norm": 1.6077406406402588, + "learning_rate": 3.885265268655478e-05, + "epoch": 0.9313568547096113, + "step": 14545 + }, + { + "loss": 2.1284, + "grad_norm": 1.5821962356567383, + "learning_rate": 3.864267228258866e-05, + "epoch": 0.9316770186335404, + "step": 14550 + }, + { + "loss": 2.1048, + "grad_norm": 1.696872591972351, + "learning_rate": 3.843131221414738e-05, + "epoch": 0.9319971825574694, + "step": 14555 + }, + { + "loss": 2.1074, + "grad_norm": 1.5824540853500366, + "learning_rate": 3.8218593855780746e-05, + "epoch": 0.9323173464813985, + "step": 14560 + }, + { + "loss": 2.1297, + "grad_norm": 1.553502082824707, + "learning_rate": 3.800453871940049e-05, + "epoch": 0.9326375104053275, + "step": 14565 + }, + { + "loss": 2.106, + "grad_norm": 1.4843323230743408, + "learning_rate": 3.778916845210487e-05, + "epoch": 0.9329576743292566, + "step": 14570 + }, + { + "loss": 2.1326, + "grad_norm": 1.5960259437561035, + "learning_rate": 3.757250483398952e-05, + "epoch": 0.9332778382531857, + "step": 14575 + }, + { + "loss": 2.1133, + "grad_norm": 1.5438355207443237, + "learning_rate": 3.735456977594481e-05, + "epoch": 0.9335980021771146, + "step": 14580 + }, + { + "loss": 2.1361, + "grad_norm": 1.5123685598373413, + "learning_rate": 3.71353853174401e-05, + "epoch": 0.9339181661010437, + "step": 14585 + }, + { + "loss": 2.1555, + "grad_norm": 1.6334389448165894, + "learning_rate": 3.691497362429485e-05, + "epoch": 0.9342383300249728, + "step": 14590 + }, + { + "loss": 2.1254, + "grad_norm": 1.5371829271316528, + "learning_rate": 3.669335698643704e-05, + "epoch": 0.9345584939489019, + "step": 14595 + }, + { + "loss": 2.1017, + "grad_norm": 1.5454310178756714, + "learning_rate": 3.64705578156491e-05, + "epoch": 0.9348786578728309, + "step": 14600 + }, + { + 
"eval_loss": 1.994555950164795, + "eval_runtime": 9.3529, + "eval_samples_per_second": 218.97, + "eval_steps_per_second": 27.371, + "epoch": 0.9348786578728309, + "step": 14600 + }, + { + "loss": 2.1203, + "grad_norm": 1.554782509803772, + "learning_rate": 3.624659864330129e-05, + "epoch": 0.93519882179676, + "step": 14605 + }, + { + "loss": 2.1085, + "grad_norm": 1.4939922094345093, + "learning_rate": 3.602150211807326e-05, + "epoch": 0.935518985720689, + "step": 14610 + }, + { + "loss": 2.1079, + "grad_norm": 1.5748850107192993, + "learning_rate": 3.5795291003663575e-05, + "epoch": 0.935839149644618, + "step": 14615 + }, + { + "loss": 2.109, + "grad_norm": 1.5259861946105957, + "learning_rate": 3.556798817648763e-05, + "epoch": 0.9361593135685471, + "step": 14620 + }, + { + "loss": 2.1514, + "grad_norm": 1.5462098121643066, + "learning_rate": 3.533961662336424e-05, + "epoch": 0.9364794774924762, + "step": 14625 + }, + { + "loss": 2.1115, + "grad_norm": 1.5656838417053223, + "learning_rate": 3.511019943919098e-05, + "epoch": 0.9367996414164051, + "step": 14630 + }, + { + "loss": 2.123, + "grad_norm": 1.528436541557312, + "learning_rate": 3.487975982460863e-05, + "epoch": 0.9371198053403342, + "step": 14635 + }, + { + "loss": 2.1343, + "grad_norm": 1.5504084825515747, + "learning_rate": 3.4648321083654935e-05, + "epoch": 0.9374399692642633, + "step": 14640 + }, + { + "loss": 2.1361, + "grad_norm": 1.524232029914856, + "learning_rate": 3.441590662140792e-05, + "epoch": 0.9377601331881924, + "step": 14645 + }, + { + "loss": 2.1085, + "grad_norm": 1.4713977575302124, + "learning_rate": 3.418253994161892e-05, + "epoch": 0.9380802971121214, + "step": 14650 + }, + { + "loss": 2.115, + "grad_norm": 1.5077587366104126, + "learning_rate": 3.3948244644335735e-05, + "epoch": 0.9384004610360505, + "step": 14655 + }, + { + "loss": 2.1197, + "grad_norm": 1.5058708190917969, + "learning_rate": 3.3713044423515946e-05, + "epoch": 0.9387206249599795, + "step": 14660 + }, + { + "loss": 2.1231, + "grad_norm": 1.5846128463745117, + "learning_rate": 3.3476963064630786e-05, + "epoch": 0.9390407888839085, + "step": 14665 + }, + { + "loss": 2.0968, + "grad_norm": 1.511231541633606, + "learning_rate": 3.324002444225976e-05, + "epoch": 0.9393609528078376, + "step": 14670 + }, + { + "loss": 2.1139, + "grad_norm": 1.600589632987976, + "learning_rate": 3.3002252517676244e-05, + "epoch": 0.9396811167317667, + "step": 14675 + }, + { + "loss": 2.1021, + "grad_norm": 1.4865034818649292, + "learning_rate": 3.27636713364243e-05, + "epoch": 0.9400012806556958, + "step": 14680 + }, + { + "loss": 2.1081, + "grad_norm": 1.5039657354354858, + "learning_rate": 3.2524305025887e-05, + "epoch": 0.9403214445796247, + "step": 14685 + }, + { + "loss": 2.1285, + "grad_norm": 1.5387564897537231, + "learning_rate": 3.228417779284643e-05, + "epoch": 0.9406416085035538, + "step": 14690 + }, + { + "loss": 2.1274, + "grad_norm": 1.550644040107727, + "learning_rate": 3.2043313921035743e-05, + "epoch": 0.9409617724274829, + "step": 14695 + }, + { + "loss": 2.1279, + "grad_norm": 1.5030955076217651, + "learning_rate": 3.180173776868331e-05, + "epoch": 0.9412819363514119, + "step": 14700 + }, + { + "loss": 2.1246, + "grad_norm": 1.5117207765579224, + "learning_rate": 3.155947376604948e-05, + "epoch": 0.941602100275341, + "step": 14705 + }, + { + "loss": 2.1481, + "grad_norm": 1.5794004201889038, + "learning_rate": 3.13165464129559e-05, + "epoch": 0.94192226419927, + "step": 14710 + }, + { + "loss": 2.1415, + "grad_norm": 1.4979078769683838, + 
"learning_rate": 3.107298027630797e-05, + "epoch": 0.942242428123199, + "step": 14715 + }, + { + "loss": 2.1283, + "grad_norm": 1.5171302556991577, + "learning_rate": 3.082879998761035e-05, + "epoch": 0.9425625920471281, + "step": 14720 + }, + { + "loss": 2.1219, + "grad_norm": 1.5499953031539917, + "learning_rate": 3.058403024047607e-05, + "epoch": 0.9428827559710572, + "step": 14725 + }, + { + "loss": 2.111, + "grad_norm": 1.4895777702331543, + "learning_rate": 3.033869578812924e-05, + "epoch": 0.9432029198949863, + "step": 14730 + }, + { + "loss": 2.1037, + "grad_norm": 1.5428434610366821, + "learning_rate": 3.0092821440901857e-05, + "epoch": 0.9435230838189153, + "step": 14735 + }, + { + "loss": 2.1368, + "grad_norm": 1.5094553232192993, + "learning_rate": 2.984643206372471e-05, + "epoch": 0.9438432477428443, + "step": 14740 + }, + { + "loss": 2.1381, + "grad_norm": 1.5818499326705933, + "learning_rate": 2.959955257361286e-05, + "epoch": 0.9441634116667734, + "step": 14745 + }, + { + "loss": 2.0922, + "grad_norm": 1.513429880142212, + "learning_rate": 2.935220793714582e-05, + "epoch": 0.9444835755907024, + "step": 14750 + }, + { + "loss": 2.1022, + "grad_norm": 1.556632161140442, + "learning_rate": 2.9104423167942678e-05, + "epoch": 0.9448037395146315, + "step": 14755 + }, + { + "loss": 2.1063, + "grad_norm": 1.5335612297058105, + "learning_rate": 2.885622332413256e-05, + "epoch": 0.9451239034385606, + "step": 14760 + }, + { + "loss": 2.1308, + "grad_norm": 1.5133774280548096, + "learning_rate": 2.8607633505820504e-05, + "epoch": 0.9454440673624896, + "step": 14765 + }, + { + "loss": 2.0859, + "grad_norm": 1.4861382246017456, + "learning_rate": 2.835867885254912e-05, + "epoch": 0.9457642312864186, + "step": 14770 + }, + { + "loss": 2.1228, + "grad_norm": 1.5895521640777588, + "learning_rate": 2.8109384540756267e-05, + "epoch": 0.9460843952103477, + "step": 14775 + }, + { + "loss": 2.0916, + "grad_norm": 1.5556671619415283, + "learning_rate": 2.7859775781229013e-05, + "epoch": 0.9464045591342768, + "step": 14780 + }, + { + "loss": 2.1495, + "grad_norm": 1.494764804840088, + "learning_rate": 2.7609877816554085e-05, + "epoch": 0.9467247230582058, + "step": 14785 + }, + { + "loss": 2.1671, + "grad_norm": 1.5063127279281616, + "learning_rate": 2.7359715918565103e-05, + "epoch": 0.9470448869821348, + "step": 14790 + }, + { + "loss": 2.1287, + "grad_norm": 1.612107515335083, + "learning_rate": 2.710931538578692e-05, + "epoch": 0.9473650509060639, + "step": 14795 + }, + { + "loss": 2.1083, + "grad_norm": 1.5210696458816528, + "learning_rate": 2.6858701540877185e-05, + "epoch": 0.9476852148299929, + "step": 14800 + }, + { + "eval_loss": 1.9744480848312378, + "eval_runtime": 9.307, + "eval_samples_per_second": 220.049, + "eval_steps_per_second": 27.506, + "epoch": 0.9476852148299929, + "step": 14800 + }, + { + "loss": 2.1447, + "grad_norm": 1.492371916770935, + "learning_rate": 2.660789972806551e-05, + "epoch": 0.948005378753922, + "step": 14805 + }, + { + "loss": 2.1099, + "grad_norm": 1.541387677192688, + "learning_rate": 2.635693531059043e-05, + "epoch": 0.9483255426778511, + "step": 14810 + }, + { + "loss": 2.1105, + "grad_norm": 1.4616005420684814, + "learning_rate": 2.6105833668134473e-05, + "epoch": 0.9486457066017802, + "step": 14815 + }, + { + "loss": 2.1132, + "grad_norm": 1.4997398853302002, + "learning_rate": 2.5854620194257533e-05, + "epoch": 0.9489658705257091, + "step": 14820 + }, + { + "loss": 2.104, + "grad_norm": 1.5401780605316162, + "learning_rate": 2.5603320293828866e-05, + 
"epoch": 0.9492860344496382, + "step": 14825 + }, + { + "loss": 2.1319, + "grad_norm": 1.4212836027145386, + "learning_rate": 2.535195938045791e-05, + "epoch": 0.9496061983735673, + "step": 14830 + }, + { + "loss": 2.1123, + "grad_norm": 1.498403549194336, + "learning_rate": 2.5100562873924283e-05, + "epoch": 0.9499263622974963, + "step": 14835 + }, + { + "loss": 2.1251, + "grad_norm": 1.5054161548614502, + "learning_rate": 2.484915619760707e-05, + "epoch": 0.9502465262214254, + "step": 14840 + }, + { + "loss": 2.1148, + "grad_norm": 1.5165938138961792, + "learning_rate": 2.4597764775913813e-05, + "epoch": 0.9505666901453544, + "step": 14845 + }, + { + "loss": 2.1301, + "grad_norm": 1.4614144563674927, + "learning_rate": 2.4346414031709386e-05, + "epoch": 0.9508868540692835, + "step": 14850 + }, + { + "loss": 2.0923, + "grad_norm": 1.522401213645935, + "learning_rate": 2.409512938374499e-05, + "epoch": 0.9512070179932125, + "step": 14855 + }, + { + "loss": 2.1349, + "grad_norm": 1.5170820951461792, + "learning_rate": 2.384393624408761e-05, + "epoch": 0.9515271819171416, + "step": 14860 + }, + { + "loss": 2.0957, + "grad_norm": 1.4755724668502808, + "learning_rate": 2.3592860015550146e-05, + "epoch": 0.9518473458410707, + "step": 14865 + }, + { + "loss": 2.1389, + "grad_norm": 1.517958164215088, + "learning_rate": 2.334192608912241e-05, + "epoch": 0.9521675097649996, + "step": 14870 + }, + { + "loss": 2.1245, + "grad_norm": 1.4963104724884033, + "learning_rate": 2.3091159841403398e-05, + "epoch": 0.9524876736889287, + "step": 14875 + }, + { + "loss": 2.1243, + "grad_norm": 1.4569158554077148, + "learning_rate": 2.2840586632035014e-05, + "epoch": 0.9528078376128578, + "step": 14880 + }, + { + "loss": 2.1134, + "grad_norm": 1.542040467262268, + "learning_rate": 2.2590231801137447e-05, + "epoch": 0.9531280015367868, + "step": 14885 + }, + { + "loss": 2.1003, + "grad_norm": 1.4777690172195435, + "learning_rate": 2.2340120666746577e-05, + "epoch": 0.9534481654607159, + "step": 14890 + }, + { + "loss": 2.1317, + "grad_norm": 1.4963622093200684, + "learning_rate": 2.2090278522253604e-05, + "epoch": 0.953768329384645, + "step": 14895 + }, + { + "loss": 2.1372, + "grad_norm": 1.5573043823242188, + "learning_rate": 2.1840730633847156e-05, + "epoch": 0.954088493308574, + "step": 14900 + }, + { + "loss": 2.1405, + "grad_norm": 1.5820651054382324, + "learning_rate": 2.1591502237958115e-05, + "epoch": 0.954408657232503, + "step": 14905 + }, + { + "loss": 2.1104, + "grad_norm": 1.5473122596740723, + "learning_rate": 2.134261853870757e-05, + "epoch": 0.9547288211564321, + "step": 14910 + }, + { + "loss": 2.1043, + "grad_norm": 1.5598843097686768, + "learning_rate": 2.1094104705357908e-05, + "epoch": 0.9550489850803612, + "step": 14915 + }, + { + "loss": 2.1012, + "grad_norm": 1.5229307413101196, + "learning_rate": 2.0845985869767487e-05, + "epoch": 0.9553691490042902, + "step": 14920 + }, + { + "loss": 2.0979, + "grad_norm": 1.5292434692382812, + "learning_rate": 2.0598287123849095e-05, + "epoch": 0.9556893129282192, + "step": 14925 + }, + { + "loss": 2.1127, + "grad_norm": 1.5072777271270752, + "learning_rate": 2.0351033517032427e-05, + "epoch": 0.9560094768521483, + "step": 14930 + }, + { + "loss": 2.1393, + "grad_norm": 1.4828133583068848, + "learning_rate": 2.0104250053730905e-05, + "epoch": 0.9563296407760774, + "step": 14935 + }, + { + "loss": 2.1392, + "grad_norm": 1.572771668434143, + "learning_rate": 1.9857961690812945e-05, + "epoch": 0.9566498047000064, + "step": 14940 + }, + { + "loss": 2.0975, 
+ "grad_norm": 1.4665296077728271, + "learning_rate": 1.9612193335078193e-05, + "epoch": 0.9569699686239355, + "step": 14945 + }, + { + "loss": 2.1348, + "grad_norm": 1.47166109085083, + "learning_rate": 1.936696984073867e-05, + "epoch": 0.9572901325478645, + "step": 14950 + }, + { + "loss": 2.0937, + "grad_norm": 1.4156910181045532, + "learning_rate": 1.9122316006905333e-05, + "epoch": 0.9576102964717935, + "step": 14955 + }, + { + "loss": 2.1046, + "grad_norm": 1.5509635210037231, + "learning_rate": 1.887825657508016e-05, + "epoch": 0.9579304603957226, + "step": 14960 + }, + { + "loss": 2.1508, + "grad_norm": 1.4977760314941406, + "learning_rate": 1.8634816226654074e-05, + "epoch": 0.9582506243196517, + "step": 14965 + }, + { + "loss": 2.1104, + "grad_norm": 1.4559874534606934, + "learning_rate": 1.839201958041096e-05, + "epoch": 0.9585707882435807, + "step": 14970 + }, + { + "loss": 2.1156, + "grad_norm": 1.5095617771148682, + "learning_rate": 1.8149891190038e-05, + "epoch": 0.9588909521675097, + "step": 14975 + }, + { + "loss": 2.0877, + "grad_norm": 1.4174879789352417, + "learning_rate": 1.7908455541642584e-05, + "epoch": 0.9592111160914388, + "step": 14980 + }, + { + "loss": 2.1116, + "grad_norm": 1.5217187404632568, + "learning_rate": 1.7667737051276076e-05, + "epoch": 0.9595312800153679, + "step": 14985 + }, + { + "loss": 2.1417, + "grad_norm": 1.4732062816619873, + "learning_rate": 1.742776006246463e-05, + "epoch": 0.9598514439392969, + "step": 14990 + }, + { + "loss": 2.1204, + "grad_norm": 1.4818220138549805, + "learning_rate": 1.71885488437474e-05, + "epoch": 0.960171607863226, + "step": 14995 + }, + { + "loss": 2.0826, + "grad_norm": 1.4875595569610596, + "learning_rate": 1.695012758622226e-05, + "epoch": 0.9604917717871551, + "step": 15000 + }, + { + "eval_loss": 1.9857242107391357, + "eval_runtime": 15.0438, + "eval_samples_per_second": 136.136, + "eval_steps_per_second": 17.017, + "epoch": 0.9604917717871551, + "step": 15000 + }, + { + "loss": 2.0994, + "grad_norm": 1.5387358665466309, + "learning_rate": 1.6712520401099422e-05, + "epoch": 0.960811935711084, + "step": 15005 + }, + { + "loss": 2.1318, + "grad_norm": 1.4918915033340454, + "learning_rate": 1.6475751317263063e-05, + "epoch": 0.9611320996350131, + "step": 15010 + }, + { + "loss": 2.122, + "grad_norm": 1.4722011089324951, + "learning_rate": 1.6239844278841366e-05, + "epoch": 0.9614522635589422, + "step": 15015 + }, + { + "loss": 2.1167, + "grad_norm": 1.4381043910980225, + "learning_rate": 1.600482314278505e-05, + "epoch": 0.9617724274828713, + "step": 15020 + }, + { + "loss": 2.0594, + "grad_norm": 1.5075242519378662, + "learning_rate": 1.5770711676454767e-05, + "epoch": 0.9620925914068003, + "step": 15025 + }, + { + "loss": 2.1349, + "grad_norm": 1.49933660030365, + "learning_rate": 1.5537533555217525e-05, + "epoch": 0.9624127553307293, + "step": 15030 + }, + { + "loss": 2.1218, + "grad_norm": 1.4700450897216797, + "learning_rate": 1.5305312360052442e-05, + "epoch": 0.9627329192546584, + "step": 15035 + }, + { + "loss": 2.102, + "grad_norm": 1.5126475095748901, + "learning_rate": 1.5074071575166057e-05, + "epoch": 0.9630530831785874, + "step": 15040 + }, + { + "loss": 2.0933, + "grad_norm": 1.501659870147705, + "learning_rate": 1.4843834585617333e-05, + "epoch": 0.9633732471025165, + "step": 15045 + }, + { + "loss": 2.1065, + "grad_norm": 1.4653816223144531, + "learning_rate": 1.4614624674952842e-05, + "epoch": 0.9636934110264456, + "step": 15050 + }, + { + "loss": 2.1135, + "grad_norm": 1.5033470392227173, + 
"learning_rate": 1.4386465022852091e-05, + "epoch": 0.9640135749503745, + "step": 15055 + }, + { + "loss": 2.1284, + "grad_norm": 1.539259672164917, + "learning_rate": 1.4159378702783404e-05, + "epoch": 0.9643337388743036, + "step": 15060 + }, + { + "loss": 2.1003, + "grad_norm": 1.4910892248153687, + "learning_rate": 1.3933388679670506e-05, + "epoch": 0.9646539027982327, + "step": 15065 + }, + { + "loss": 2.1035, + "grad_norm": 1.4681600332260132, + "learning_rate": 1.3708517807570171e-05, + "epoch": 0.9649740667221618, + "step": 15070 + }, + { + "loss": 2.0992, + "grad_norm": 1.4621437788009644, + "learning_rate": 1.3484788827360955e-05, + "epoch": 0.9652942306460908, + "step": 15075 + }, + { + "loss": 2.1292, + "grad_norm": 1.438450813293457, + "learning_rate": 1.3262224364443493e-05, + "epoch": 0.9656143945700199, + "step": 15080 + }, + { + "loss": 2.0915, + "grad_norm": 1.4840413331985474, + "learning_rate": 1.3040846926452386e-05, + "epoch": 0.9659345584939489, + "step": 15085 + }, + { + "loss": 2.1174, + "grad_norm": 1.4679360389709473, + "learning_rate": 1.2820678900980093e-05, + "epoch": 0.9662547224178779, + "step": 15090 + }, + { + "loss": 2.1146, + "grad_norm": 1.46363365650177, + "learning_rate": 1.260174255331282e-05, + "epoch": 0.966574886341807, + "step": 15095 + }, + { + "loss": 2.1146, + "grad_norm": 1.5261812210083008, + "learning_rate": 1.2384060024178956e-05, + "epoch": 0.9668950502657361, + "step": 15100 + }, + { + "loss": 2.1373, + "grad_norm": 1.5290420055389404, + "learning_rate": 1.2167653327509926e-05, + "epoch": 0.9672152141896652, + "step": 15105 + }, + { + "loss": 2.0911, + "grad_norm": 1.4681881666183472, + "learning_rate": 1.1952544348214028e-05, + "epoch": 0.9675353781135941, + "step": 15110 + }, + { + "loss": 2.1215, + "grad_norm": 1.5597201585769653, + "learning_rate": 1.1738754839963159e-05, + "epoch": 0.9678555420375232, + "step": 15115 + }, + { + "loss": 2.0924, + "grad_norm": 1.4340606927871704, + "learning_rate": 1.1526306422992994e-05, + "epoch": 0.9681757059614523, + "step": 15120 + }, + { + "loss": 2.1189, + "grad_norm": 1.51260244846344, + "learning_rate": 1.1315220581916477e-05, + "epoch": 0.9684958698853813, + "step": 15125 + }, + { + "loss": 2.117, + "grad_norm": 1.4482096433639526, + "learning_rate": 1.1105518663551176e-05, + "epoch": 0.9688160338093104, + "step": 15130 + }, + { + "loss": 2.1359, + "grad_norm": 1.4650722742080688, + "learning_rate": 1.0897221874760444e-05, + "epoch": 0.9691361977332394, + "step": 15135 + }, + { + "loss": 2.1113, + "grad_norm": 1.5193512439727783, + "learning_rate": 1.0690351280308877e-05, + "epoch": 0.9694563616571684, + "step": 15140 + }, + { + "loss": 2.1186, + "grad_norm": 1.5083808898925781, + "learning_rate": 1.0484927800731984e-05, + "epoch": 0.9697765255810975, + "step": 15145 + }, + { + "loss": 2.088, + "grad_norm": 1.4598770141601562, + "learning_rate": 1.0280972210220578e-05, + "epoch": 0.9700966895050266, + "step": 15150 + }, + { + "loss": 2.091, + "grad_norm": 1.4730453491210938, + "learning_rate": 1.0078505134519874e-05, + "epoch": 0.9704168534289557, + "step": 15155 + }, + { + "loss": 2.0936, + "grad_norm": 1.4336142539978027, + "learning_rate": 9.87754704884369e-06, + "epoch": 0.9707370173528846, + "step": 15160 + }, + { + "loss": 2.099, + "grad_norm": 1.4537841081619263, + "learning_rate": 9.678118275803749e-06, + "epoch": 0.9710571812768137, + "step": 15165 + }, + { + "loss": 2.0901, + "grad_norm": 1.4845715761184692, + "learning_rate": 9.480238983354515e-06, + "epoch": 0.9713773452007428, + 
"step": 15170 + }, + { + "loss": 2.1275, + "grad_norm": 1.5654568672180176, + "learning_rate": 9.283929182753659e-06, + "epoch": 0.9716975091246718, + "step": 15175 + }, + { + "loss": 2.1, + "grad_norm": 1.4920252561569214, + "learning_rate": 9.089208726538304e-06, + "epoch": 0.9720176730486009, + "step": 15180 + }, + { + "loss": 2.1293, + "grad_norm": 1.5260412693023682, + "learning_rate": 8.896097306517388e-06, + "epoch": 0.97233783697253, + "step": 15185 + }, + { + "loss": 2.1188, + "grad_norm": 1.475035548210144, + "learning_rate": 8.70461445178025e-06, + "epoch": 0.972658000896459, + "step": 15190 + }, + { + "loss": 2.1094, + "grad_norm": 1.465287208557129, + "learning_rate": 8.514779526721713e-06, + "epoch": 0.972978164820388, + "step": 15195 + }, + { + "loss": 2.1036, + "grad_norm": 1.4647490978240967, + "learning_rate": 8.32661172908373e-06, + "epoch": 0.9732983287443171, + "step": 15200 + }, + { + "eval_loss": 1.9738588333129883, + "eval_runtime": 13.379, + "eval_samples_per_second": 153.076, + "eval_steps_per_second": 19.134, + "epoch": 0.9732983287443171, + "step": 15200 + }, + { + "loss": 2.112, + "grad_norm": 1.4496986865997314, + "learning_rate": 8.140130088014008e-06, + "epoch": 0.9736184926682462, + "step": 15205 + }, + { + "loss": 2.0984, + "grad_norm": 1.439038634300232, + "learning_rate": 7.955353462141554e-06, + "epoch": 0.9739386565921752, + "step": 15210 + }, + { + "loss": 2.0996, + "grad_norm": 1.4874390363693237, + "learning_rate": 7.7723005376696e-06, + "epoch": 0.9742588205161042, + "step": 15215 + }, + { + "loss": 2.0868, + "grad_norm": 1.4709018468856812, + "learning_rate": 7.5909898264857895e-06, + "epoch": 0.9745789844400333, + "step": 15220 + }, + { + "loss": 2.1179, + "grad_norm": 1.4825295209884644, + "learning_rate": 7.411439664290226e-06, + "epoch": 0.9748991483639623, + "step": 15225 + }, + { + "loss": 2.0792, + "grad_norm": 1.4520797729492188, + "learning_rate": 7.2336682087410985e-06, + "epoch": 0.9752193122878914, + "step": 15230 + }, + { + "loss": 2.0882, + "grad_norm": 1.4495720863342285, + "learning_rate": 7.05769343761849e-06, + "epoch": 0.9755394762118205, + "step": 15235 + }, + { + "loss": 2.1166, + "grad_norm": 1.4639912843704224, + "learning_rate": 6.883533147006266e-06, + "epoch": 0.9758596401357496, + "step": 15240 + }, + { + "loss": 2.0994, + "grad_norm": 1.4839041233062744, + "learning_rate": 6.7112049494924364e-06, + "epoch": 0.9761798040596785, + "step": 15245 + }, + { + "loss": 2.0832, + "grad_norm": 1.4434202909469604, + "learning_rate": 6.540726272387926e-06, + "epoch": 0.9764999679836076, + "step": 15250 + }, + { + "loss": 2.0947, + "grad_norm": 1.4894647598266602, + "learning_rate": 6.372114355964293e-06, + "epoch": 0.9768201319075367, + "step": 15255 + }, + { + "loss": 2.1189, + "grad_norm": 1.4748380184173584, + "learning_rate": 6.205386251710138e-06, + "epoch": 0.9771402958314657, + "step": 15260 + }, + { + "loss": 2.0734, + "grad_norm": 1.4855523109436035, + "learning_rate": 6.040558820606795e-06, + "epoch": 0.9774604597553948, + "step": 15265 + }, + { + "loss": 2.1203, + "grad_norm": 1.4771287441253662, + "learning_rate": 5.877648731423133e-06, + "epoch": 0.9777806236793238, + "step": 15270 + }, + { + "loss": 2.0971, + "grad_norm": 1.4301459789276123, + "learning_rate": 5.716672459029926e-06, + "epoch": 0.9781007876032529, + "step": 15275 + }, + { + "loss": 2.0943, + "grad_norm": 1.4706257581710815, + "learning_rate": 5.557646282733725e-06, + "epoch": 0.9784209515271819, + "step": 15280 + }, + { + "loss": 2.0998, + "grad_norm": 
1.485478401184082, + "learning_rate": 5.400586284630579e-06, + "epoch": 0.978741115451111, + "step": 15285 + }, + { + "loss": 2.0991, + "grad_norm": 1.4847251176834106, + "learning_rate": 5.245508347979675e-06, + "epoch": 0.9790612793750401, + "step": 15290 + }, + { + "loss": 2.1203, + "grad_norm": 1.4288967847824097, + "learning_rate": 5.092428155597084e-06, + "epoch": 0.979381443298969, + "step": 15295 + }, + { + "loss": 2.1178, + "grad_norm": 1.4488484859466553, + "learning_rate": 4.941361188269775e-06, + "epoch": 0.9797016072228981, + "step": 15300 + }, + { + "loss": 2.1052, + "grad_norm": 1.464503288269043, + "learning_rate": 4.792322723190057e-06, + "epoch": 0.9800217711468272, + "step": 15305 + }, + { + "loss": 2.0986, + "grad_norm": 1.5340265035629272, + "learning_rate": 4.645327832410648e-06, + "epoch": 0.9803419350707562, + "step": 15310 + }, + { + "loss": 2.096, + "grad_norm": 1.4559099674224854, + "learning_rate": 4.500391381320421e-06, + "epoch": 0.9806620989946853, + "step": 15315 + }, + { + "loss": 2.1186, + "grad_norm": 1.4796267747879028, + "learning_rate": 4.357528027141117e-06, + "epoch": 0.9809822629186143, + "step": 15320 + }, + { + "loss": 2.1225, + "grad_norm": 1.4935765266418457, + "learning_rate": 4.216752217445052e-06, + "epoch": 0.9813024268425434, + "step": 15325 + }, + { + "loss": 2.0997, + "grad_norm": 1.4788649082183838, + "learning_rate": 4.078078188694101e-06, + "epoch": 0.9816225907664724, + "step": 15330 + }, + { + "loss": 2.1163, + "grad_norm": 1.52225661277771, + "learning_rate": 3.941519964799928e-06, + "epoch": 0.9819427546904015, + "step": 15335 + }, + { + "loss": 2.1381, + "grad_norm": 1.479812502861023, + "learning_rate": 3.807091355705811e-06, + "epoch": 0.9822629186143306, + "step": 15340 + }, + { + "loss": 2.0806, + "grad_norm": 1.448622226715088, + "learning_rate": 3.674805955990032e-06, + "epoch": 0.9825830825382595, + "step": 15345 + }, + { + "loss": 2.1262, + "grad_norm": 1.514609694480896, + "learning_rate": 3.5446771434911096e-06, + "epoch": 0.9829032464621886, + "step": 15350 + }, + { + "loss": 2.0836, + "grad_norm": 1.492081880569458, + "learning_rate": 3.416718077954864e-06, + "epoch": 0.9832234103861177, + "step": 15355 + }, + { + "loss": 2.113, + "grad_norm": 1.4686152935028076, + "learning_rate": 3.290941699703651e-06, + "epoch": 0.9835435743100468, + "step": 15360 + }, + { + "loss": 2.1112, + "grad_norm": 1.521332859992981, + "learning_rate": 3.1673607283276813e-06, + "epoch": 0.9838637382339758, + "step": 15365 + }, + { + "loss": 2.1045, + "grad_norm": 1.4462871551513672, + "learning_rate": 3.045987661398736e-06, + "epoch": 0.9841839021579049, + "step": 15370 + }, + { + "loss": 2.1058, + "grad_norm": 1.4568146467208862, + "learning_rate": 2.926834773206269e-06, + "epoch": 0.9845040660818339, + "step": 15375 + }, + { + "loss": 2.123, + "grad_norm": 1.486159324645996, + "learning_rate": 2.809914113516171e-06, + "epoch": 0.9848242300057629, + "step": 15380 + }, + { + "loss": 2.1106, + "grad_norm": 1.4326839447021484, + "learning_rate": 2.6952375063521467e-06, + "epoch": 0.985144393929692, + "step": 15385 + }, + { + "loss": 2.1136, + "grad_norm": 1.4544931650161743, + "learning_rate": 2.582816548800002e-06, + "epoch": 0.9854645578536211, + "step": 15390 + }, + { + "loss": 2.0961, + "grad_norm": 1.5159916877746582, + "learning_rate": 2.472662609834825e-06, + "epoch": 0.98578472177755, + "step": 15395 + }, + { + "loss": 2.0906, + "grad_norm": 1.4797563552856445, + "learning_rate": 2.364786829171281e-06, + "epoch": 0.9861048857014791, + 
"step": 15400 + }, + { + "eval_loss": 1.969813346862793, + "eval_runtime": 9.427, + "eval_samples_per_second": 217.248, + "eval_steps_per_second": 27.156, + "epoch": 0.9861048857014791, + "step": 15400 + }, + { + "loss": 2.1251, + "grad_norm": 1.4745688438415527, + "learning_rate": 2.2592001161370392e-06, + "epoch": 0.9864250496254082, + "step": 15405 + }, + { + "loss": 2.0873, + "grad_norm": 1.4373728036880493, + "learning_rate": 2.155913148569558e-06, + "epoch": 0.9867452135493373, + "step": 15410 + }, + { + "loss": 2.0997, + "grad_norm": 1.454837679862976, + "learning_rate": 2.0549363717362215e-06, + "epoch": 0.9870653774732663, + "step": 15415 + }, + { + "loss": 2.1055, + "grad_norm": 1.472625970840454, + "learning_rate": 1.956279997278043e-06, + "epoch": 0.9873855413971954, + "step": 15420 + }, + { + "loss": 2.1318, + "grad_norm": 1.3973727226257324, + "learning_rate": 1.8599540021769695e-06, + "epoch": 0.9877057053211244, + "step": 15425 + }, + { + "loss": 2.0958, + "grad_norm": 1.4296380281448364, + "learning_rate": 1.765968127746928e-06, + "epoch": 0.9880258692450534, + "step": 15430 + }, + { + "loss": 2.0831, + "grad_norm": 1.433281421661377, + "learning_rate": 1.6743318786486966e-06, + "epoch": 0.9883460331689825, + "step": 15435 + }, + { + "loss": 2.0827, + "grad_norm": 1.4774094820022583, + "learning_rate": 1.5850545219287e-06, + "epoch": 0.9886661970929116, + "step": 15440 + }, + { + "loss": 2.125, + "grad_norm": 1.477018117904663, + "learning_rate": 1.4981450860818757e-06, + "epoch": 0.9889863610168407, + "step": 15445 + }, + { + "loss": 2.076, + "grad_norm": 1.5008845329284668, + "learning_rate": 1.4136123601385998e-06, + "epoch": 0.9893065249407696, + "step": 15450 + }, + { + "loss": 2.0926, + "grad_norm": 1.450234055519104, + "learning_rate": 1.3314648927758966e-06, + "epoch": 0.9896266888646987, + "step": 15455 + }, + { + "loss": 2.0658, + "grad_norm": 1.4952715635299683, + "learning_rate": 1.2517109914528841e-06, + "epoch": 0.9899468527886278, + "step": 15460 + }, + { + "loss": 2.1135, + "grad_norm": 1.434744954109192, + "learning_rate": 1.1743587215707007e-06, + "epoch": 0.9902670167125568, + "step": 15465 + }, + { + "loss": 2.0885, + "grad_norm": 1.4307448863983154, + "learning_rate": 1.0994159056568198e-06, + "epoch": 0.9905871806364859, + "step": 15470 + }, + { + "loss": 2.0971, + "grad_norm": 1.4121356010437012, + "learning_rate": 1.026890122573998e-06, + "epoch": 0.990907344560415, + "step": 15475 + }, + { + "loss": 2.1023, + "grad_norm": 1.4584524631500244, + "learning_rate": 9.56788706753814e-07, + "epoch": 0.9912275084843439, + "step": 15480 + }, + { + "loss": 2.1039, + "grad_norm": 1.4847089052200317, + "learning_rate": 8.891187474549617e-07, + "epoch": 0.991547672408273, + "step": 15485 + }, + { + "loss": 2.0808, + "grad_norm": 1.496368408203125, + "learning_rate": 8.238870880463157e-07, + "epoch": 0.9918678363322021, + "step": 15490 + }, + { + "loss": 2.0938, + "grad_norm": 1.4407403469085693, + "learning_rate": 7.611003253148757e-07, + "epoch": 0.9921880002561312, + "step": 15495 + }, + { + "loss": 2.0824, + "grad_norm": 1.4371376037597656, + "learning_rate": 7.007648087986374e-07, + "epoch": 0.9925081641800602, + "step": 15500 + }, + { + "loss": 2.0959, + "grad_norm": 1.4718334674835205, + "learning_rate": 6.428866401444799e-07, + "epoch": 0.9928283281039892, + "step": 15505 + }, + { + "loss": 2.1124, + "grad_norm": 1.5052322149276733, + "learning_rate": 5.874716724911078e-07, + "epoch": 0.9931484920279183, + "step": 15510 + }, + { + "loss": 2.0862, + 
"grad_norm": 1.485822319984436, + "learning_rate": 5.345255098771346e-07, + "epoch": 0.9934686559518473, + "step": 15515 + }, + { + "loss": 2.0994, + "grad_norm": 1.4460045099258423, + "learning_rate": 4.840535066743506e-07, + "epoch": 0.9937888198757764, + "step": 15520 + }, + { + "loss": 2.093, + "grad_norm": 1.4056727886199951, + "learning_rate": 4.360607670462591e-07, + "epoch": 0.9941089837997055, + "step": 15525 + }, + { + "loss": 2.094, + "grad_norm": 1.4093832969665527, + "learning_rate": 3.905521444318605e-07, + "epoch": 0.9944291477236346, + "step": 15530 + }, + { + "loss": 2.1117, + "grad_norm": 1.445207953453064, + "learning_rate": 3.4753224105488204e-07, + "epoch": 0.9947493116475635, + "step": 15535 + }, + { + "loss": 2.1148, + "grad_norm": 1.4611190557479858, + "learning_rate": 3.070054074583162e-07, + "epoch": 0.9950694755714926, + "step": 15540 + }, + { + "loss": 2.1264, + "grad_norm": 1.5278282165527344, + "learning_rate": 2.689757420644951e-07, + "epoch": 0.9953896394954217, + "step": 15545 + }, + { + "loss": 2.1253, + "grad_norm": 1.4608923196792603, + "learning_rate": 2.334470907605829e-07, + "epoch": 0.9957098034193507, + "step": 15550 + }, + { + "loss": 2.0896, + "grad_norm": 1.4511940479278564, + "learning_rate": 2.004230465096818e-07, + "epoch": 0.9960299673432798, + "step": 15555 + }, + { + "loss": 2.0991, + "grad_norm": 1.4206980466842651, + "learning_rate": 1.699069489874583e-07, + "epoch": 0.9963501312672088, + "step": 15560 + }, + { + "loss": 2.0942, + "grad_norm": 1.4339975118637085, + "learning_rate": 1.419018842444164e-07, + "epoch": 0.9966702951911378, + "step": 15565 + }, + { + "loss": 2.1004, + "grad_norm": 1.4129743576049805, + "learning_rate": 1.1641068439380842e-07, + "epoch": 0.9969904591150669, + "step": 15570 + }, + { + "loss": 2.1244, + "grad_norm": 1.4470487833023071, + "learning_rate": 9.343592732521944e-08, + "epoch": 0.997310623038996, + "step": 15575 + }, + { + "loss": 2.0782, + "grad_norm": 1.457018256187439, + "learning_rate": 7.29799364438899e-08, + "epoch": 0.9976307869629251, + "step": 15580 + }, + { + "loss": 2.1337, + "grad_norm": 1.4190618991851807, + "learning_rate": 5.5044780435722923e-08, + "epoch": 0.997950950886854, + "step": 15585 + }, + { + "loss": 2.1195, + "grad_norm": 1.4926364421844482, + "learning_rate": 3.963227305810724e-08, + "epoch": 0.9982711148107831, + "step": 15590 + }, + { + "loss": 2.0792, + "grad_norm": 1.4838751554489136, + "learning_rate": 2.6743972956475016e-08, + "epoch": 0.9985912787347122, + "step": 15595 + }, + { + "loss": 2.0793, + "grad_norm": 1.4116333723068237, + "learning_rate": 1.6381183506697374e-08, + "epoch": 0.9989114426586412, + "step": 15600 + }, + { + "eval_loss": 1.9771896600723267, + "eval_runtime": 12.6984, + "eval_samples_per_second": 161.281, + "eval_steps_per_second": 20.16, + "epoch": 0.9989114426586412, + "step": 15600 + }, + { + "loss": 2.1191, + "grad_norm": 1.5354454517364502, + "learning_rate": 8.544952683253726e-09, + "epoch": 0.9992316065825703, + "step": 15605 + }, + { + "loss": 2.097, + "grad_norm": 1.3956998586654663, + "learning_rate": 3.2360729532776578e-09, + "epoch": 0.9995517705064993, + "step": 15610 + }, + { + "loss": 2.0965, + "grad_norm": 1.407570481300354, + "learning_rate": 4.550811963849322e-10, + "epoch": 0.9998719344304284, + "step": 15615 + }, + { + "train_runtime": 71.4624, + "train_samples_per_second": 55944.819, + "train_steps_per_second": 218.534, + "total_flos": 5.2748789856731136e+17, + "train_loss": 0.0022892785559331377, + "epoch": 1.0, + "step": 15617 
+ }, + { + "eval_loss": 1.9724633693695068, + "eval_runtime": 9.3608, + "eval_samples_per_second": 218.784, + "eval_steps_per_second": 27.348, + "epoch": 1.0, + "step": 15617 + }, + { + "train_runtime": 0.0087, + "train_samples_per_second": 461932896.212, + "train_steps_per_second": 1804425.376, + "total_flos": 5.2748789856731136e+17, + "train_loss": 0.0, + "epoch": 1.0, + "step": 15617 + }, + { + "eval_loss": 1.9763054847717285, + "eval_runtime": 13.0331, + "eval_samples_per_second": 157.138, + "eval_steps_per_second": 19.642, + "epoch": 1.0, + "step": 15617 + } + ], + "best_metric": null, + "best_model_checkpoint": null, + "is_local_process_zero": true, + "is_world_process_zero": true, + "is_hyper_param_search": false, + "trial_name": null, + "trial_params": null, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_training_stop": false, + "should_epoch_stop": false, + "should_save": false, + "should_evaluate": false, + "should_log": false + }, + "attributes": {} + } + } +} \ No newline at end of file