vila-ewm-qwen2-1.5b / trainer_state.json
Commit 827be9e: "Upload files with huggingface_hub"
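The JSON below is the Trainer state saved with this checkpoint; each entry in log_history records the loss, learning rate, and gradient norm logged at one global step. A minimal sketch for inspecting it, assuming the standard transformers trainer_state.json layout, a local copy of the file, and matplotlib for plotting (none of which is part of the original upload):

# Hedged example: load trainer_state.json and plot the logged training
# loss and learning rate over global steps. Field names (log_history,
# step, loss, learning_rate) match the file shown below.
import json

import matplotlib.pyplot as plt  # assumed available; any plotting tool works

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only entries that carry a training loss (skips any eval-only records).
entries = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]
lrs = [e["learning_rate"] for e in entries]

fig, (ax1, ax2) = plt.subplots(2, 1, sharex=True)
ax1.plot(steps, losses)
ax1.set_ylabel("training loss")
ax2.plot(steps, lrs)
ax2.set_ylabel("learning rate")
ax2.set_xlabel("global step")
plt.tight_layout()
plt.show()

Raw file contents follow.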
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.710144927536232,
"eval_steps": 500,
"global_step": 1024,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"grad_norm": 46.72853930172847,
"learning_rate": 4.0000000000000003e-07,
"loss": 0.6225,
"step": 1
},
{
"epoch": 0.01,
"grad_norm": 52.07096639492344,
"learning_rate": 8.000000000000001e-07,
"loss": 0.649,
"step": 2
},
{
"epoch": 0.01,
"grad_norm": 53.48678929212073,
"learning_rate": 1.2000000000000002e-06,
"loss": 0.6198,
"step": 3
},
{
"epoch": 0.01,
"grad_norm": 40.93037505319626,
"learning_rate": 1.6000000000000001e-06,
"loss": 0.5608,
"step": 4
},
{
"epoch": 0.02,
"grad_norm": 52.14947742859088,
"learning_rate": 2.0000000000000003e-06,
"loss": 0.5664,
"step": 5
},
{
"epoch": 0.02,
"grad_norm": 39.68797135248386,
"learning_rate": 2.4000000000000003e-06,
"loss": 0.5766,
"step": 6
},
{
"epoch": 0.03,
"grad_norm": 27.608786716750426,
"learning_rate": 2.8000000000000003e-06,
"loss": 0.3961,
"step": 7
},
{
"epoch": 0.03,
"grad_norm": 5.843193562709772,
"learning_rate": 3.2000000000000003e-06,
"loss": 0.2716,
"step": 8
},
{
"epoch": 0.03,
"grad_norm": 5.204064082882707,
"learning_rate": 3.6000000000000003e-06,
"loss": 0.2625,
"step": 9
},
{
"epoch": 0.04,
"grad_norm": 7.24061080880167,
"learning_rate": 4.000000000000001e-06,
"loss": 0.2859,
"step": 10
},
{
"epoch": 0.04,
"grad_norm": 4.127471947114424,
"learning_rate": 4.4e-06,
"loss": 0.2321,
"step": 11
},
{
"epoch": 0.04,
"grad_norm": 5.14969705214205,
"learning_rate": 4.800000000000001e-06,
"loss": 0.223,
"step": 12
},
{
"epoch": 0.05,
"grad_norm": 7.332899725870617,
"learning_rate": 5.2e-06,
"loss": 0.25,
"step": 13
},
{
"epoch": 0.05,
"grad_norm": 3.231171781798085,
"learning_rate": 5.600000000000001e-06,
"loss": 0.2154,
"step": 14
},
{
"epoch": 0.05,
"grad_norm": 2.5854830947431044,
"learning_rate": 6e-06,
"loss": 0.2381,
"step": 15
},
{
"epoch": 0.06,
"grad_norm": 9.840138857999184,
"learning_rate": 6.4000000000000006e-06,
"loss": 0.1981,
"step": 16
},
{
"epoch": 0.06,
"grad_norm": 8.377814173650734,
"learning_rate": 6.800000000000001e-06,
"loss": 0.2692,
"step": 17
},
{
"epoch": 0.07,
"grad_norm": 12.205565266596127,
"learning_rate": 7.2000000000000005e-06,
"loss": 0.2197,
"step": 18
},
{
"epoch": 0.07,
"grad_norm": 7.0540544766233415,
"learning_rate": 7.600000000000001e-06,
"loss": 0.2349,
"step": 19
},
{
"epoch": 0.07,
"grad_norm": 11.65295971356695,
"learning_rate": 8.000000000000001e-06,
"loss": 0.3208,
"step": 20
},
{
"epoch": 0.08,
"grad_norm": 9.52181319610902,
"learning_rate": 8.400000000000001e-06,
"loss": 0.2307,
"step": 21
},
{
"epoch": 0.08,
"grad_norm": 2.482819938202367,
"learning_rate": 8.8e-06,
"loss": 0.2024,
"step": 22
},
{
"epoch": 0.08,
"grad_norm": 4.609864061676336,
"learning_rate": 9.200000000000002e-06,
"loss": 0.2463,
"step": 23
},
{
"epoch": 0.09,
"grad_norm": 4.245726835362501,
"learning_rate": 9.600000000000001e-06,
"loss": 0.2328,
"step": 24
},
{
"epoch": 0.09,
"grad_norm": 6.138546706803432,
"learning_rate": 1e-05,
"loss": 0.2496,
"step": 25
},
{
"epoch": 0.09,
"grad_norm": 4.781197388695438,
"learning_rate": 1.04e-05,
"loss": 0.2714,
"step": 26
},
{
"epoch": 0.1,
"grad_norm": 4.108329488753126,
"learning_rate": 1.0800000000000002e-05,
"loss": 0.2507,
"step": 27
},
{
"epoch": 0.1,
"grad_norm": 3.800437254625411,
"learning_rate": 1.1200000000000001e-05,
"loss": 0.2105,
"step": 28
},
{
"epoch": 0.11,
"grad_norm": 1.649524899716385,
"learning_rate": 1.16e-05,
"loss": 0.2352,
"step": 29
},
{
"epoch": 0.11,
"grad_norm": 2.3212480454181823,
"learning_rate": 1.2e-05,
"loss": 0.2121,
"step": 30
},
{
"epoch": 0.11,
"grad_norm": 1.8259124298359497,
"learning_rate": 1.2400000000000002e-05,
"loss": 0.2102,
"step": 31
},
{
"epoch": 0.12,
"grad_norm": 8.370375637151021,
"learning_rate": 1.2800000000000001e-05,
"loss": 0.2299,
"step": 32
},
{
"epoch": 0.12,
"grad_norm": 3.8976045378474216,
"learning_rate": 1.3200000000000002e-05,
"loss": 0.1925,
"step": 33
},
{
"epoch": 0.12,
"grad_norm": 4.546522390784352,
"learning_rate": 1.3600000000000002e-05,
"loss": 0.221,
"step": 34
},
{
"epoch": 0.13,
"grad_norm": 2.0450418382891864,
"learning_rate": 1.4e-05,
"loss": 0.2493,
"step": 35
},
{
"epoch": 0.13,
"grad_norm": 2.689732934577616,
"learning_rate": 1.4400000000000001e-05,
"loss": 0.2286,
"step": 36
},
{
"epoch": 0.13,
"grad_norm": 2.0181283532549883,
"learning_rate": 1.48e-05,
"loss": 0.2058,
"step": 37
},
{
"epoch": 0.14,
"grad_norm": 3.0290096617307323,
"learning_rate": 1.5200000000000002e-05,
"loss": 0.2133,
"step": 38
},
{
"epoch": 0.14,
"grad_norm": 5.3763932465021425,
"learning_rate": 1.5600000000000003e-05,
"loss": 0.2596,
"step": 39
},
{
"epoch": 0.14,
"grad_norm": 1.4603343566227587,
"learning_rate": 1.6000000000000003e-05,
"loss": 0.2122,
"step": 40
},
{
"epoch": 0.15,
"grad_norm": 3.332369451595958,
"learning_rate": 1.64e-05,
"loss": 0.2208,
"step": 41
},
{
"epoch": 0.15,
"grad_norm": 3.7861266281997907,
"learning_rate": 1.6800000000000002e-05,
"loss": 0.2456,
"step": 42
},
{
"epoch": 0.16,
"grad_norm": 3.7225805400820935,
"learning_rate": 1.72e-05,
"loss": 0.2465,
"step": 43
},
{
"epoch": 0.16,
"grad_norm": 1.9056402543182682,
"learning_rate": 1.76e-05,
"loss": 0.22,
"step": 44
},
{
"epoch": 0.16,
"grad_norm": 1.8766590187035856,
"learning_rate": 1.8e-05,
"loss": 0.1729,
"step": 45
},
{
"epoch": 0.17,
"grad_norm": 7.788754664844443,
"learning_rate": 1.8400000000000003e-05,
"loss": 0.2463,
"step": 46
},
{
"epoch": 0.17,
"grad_norm": 3.44553452940045,
"learning_rate": 1.88e-05,
"loss": 0.1861,
"step": 47
},
{
"epoch": 0.17,
"grad_norm": 3.0748717088951483,
"learning_rate": 1.9200000000000003e-05,
"loss": 0.2392,
"step": 48
},
{
"epoch": 0.18,
"grad_norm": 3.1429857670987684,
"learning_rate": 1.9600000000000002e-05,
"loss": 0.196,
"step": 49
},
{
"epoch": 0.18,
"grad_norm": 2.6694741818907692,
"learning_rate": 2e-05,
"loss": 0.1938,
"step": 50
},
{
"epoch": 0.18,
"grad_norm": 3.075853192374163,
"learning_rate": 1.9999980867200107e-05,
"loss": 0.2176,
"step": 51
},
{
"epoch": 0.19,
"grad_norm": 2.5881096938435726,
"learning_rate": 1.9999923468873635e-05,
"loss": 0.2713,
"step": 52
},
{
"epoch": 0.19,
"grad_norm": 2.041830913696081,
"learning_rate": 1.9999827805240225e-05,
"loss": 0.2162,
"step": 53
},
{
"epoch": 0.2,
"grad_norm": 1.5182608103998274,
"learning_rate": 1.999969387666594e-05,
"loss": 0.1877,
"step": 54
},
{
"epoch": 0.2,
"grad_norm": 2.300761589056313,
"learning_rate": 1.9999521683663265e-05,
"loss": 0.2189,
"step": 55
},
{
"epoch": 0.2,
"grad_norm": 1.8419792972213038,
"learning_rate": 1.9999311226891104e-05,
"loss": 0.2094,
"step": 56
},
{
"epoch": 0.21,
"grad_norm": 3.219928784202957,
"learning_rate": 1.9999062507154786e-05,
"loss": 0.2236,
"step": 57
},
{
"epoch": 0.21,
"grad_norm": 4.091917760353657,
"learning_rate": 1.999877552540605e-05,
"loss": 0.1977,
"step": 58
},
{
"epoch": 0.21,
"grad_norm": 3.3233992848215195,
"learning_rate": 1.9998450282743055e-05,
"loss": 0.2333,
"step": 59
},
{
"epoch": 0.22,
"grad_norm": 2.964219225360724,
"learning_rate": 1.9998086780410353e-05,
"loss": 0.2208,
"step": 60
},
{
"epoch": 0.22,
"grad_norm": 4.219456222024613,
"learning_rate": 1.9997685019798913e-05,
"loss": 0.2002,
"step": 61
},
{
"epoch": 0.22,
"grad_norm": 4.084361648388864,
"learning_rate": 1.999724500244609e-05,
"loss": 0.2443,
"step": 62
},
{
"epoch": 0.23,
"grad_norm": 5.875576520826541,
"learning_rate": 1.9996766730035642e-05,
"loss": 0.2263,
"step": 63
},
{
"epoch": 0.23,
"grad_norm": 1.803152179024348,
"learning_rate": 1.999625020439771e-05,
"loss": 0.1829,
"step": 64
},
{
"epoch": 0.24,
"grad_norm": 5.487803901369769,
"learning_rate": 1.99956954275088e-05,
"loss": 0.247,
"step": 65
},
{
"epoch": 0.24,
"grad_norm": 1.330507485677968,
"learning_rate": 1.999510240149181e-05,
"loss": 0.1726,
"step": 66
},
{
"epoch": 0.24,
"grad_norm": 6.750879407303498,
"learning_rate": 1.9994471128615984e-05,
"loss": 0.2313,
"step": 67
},
{
"epoch": 0.25,
"grad_norm": 4.390482378065639,
"learning_rate": 1.9993801611296923e-05,
"loss": 0.193,
"step": 68
},
{
"epoch": 0.25,
"grad_norm": 2.192715936136871,
"learning_rate": 1.9993093852096582e-05,
"loss": 0.1794,
"step": 69
},
{
"epoch": 0.25,
"grad_norm": 3.367128095928201,
"learning_rate": 1.999234785372324e-05,
"loss": 0.2131,
"step": 70
},
{
"epoch": 0.26,
"grad_norm": 4.050893259478687,
"learning_rate": 1.9991563619031508e-05,
"loss": 0.1899,
"step": 71
},
{
"epoch": 0.26,
"grad_norm": 2.120204599576964,
"learning_rate": 1.9990741151022302e-05,
"loss": 0.2087,
"step": 72
},
{
"epoch": 0.26,
"grad_norm": 4.29023535186417,
"learning_rate": 1.9989880452842847e-05,
"loss": 0.1858,
"step": 73
},
{
"epoch": 0.27,
"grad_norm": 4.564103746693168,
"learning_rate": 1.9988981527786656e-05,
"loss": 0.1837,
"step": 74
},
{
"epoch": 0.27,
"grad_norm": 1.468247385963039,
"learning_rate": 1.9988044379293523e-05,
"loss": 0.2097,
"step": 75
},
{
"epoch": 0.28,
"grad_norm": 6.339814904044548,
"learning_rate": 1.99870690109495e-05,
"loss": 0.1997,
"step": 76
},
{
"epoch": 0.28,
"grad_norm": 2.753844502932187,
"learning_rate": 1.9986055426486888e-05,
"loss": 0.199,
"step": 77
},
{
"epoch": 0.28,
"grad_norm": 8.614473813172529,
"learning_rate": 1.9985003629784237e-05,
"loss": 0.2653,
"step": 78
},
{
"epoch": 0.29,
"grad_norm": 6.272374952434112,
"learning_rate": 1.9983913624866304e-05,
"loss": 0.1559,
"step": 79
},
{
"epoch": 0.29,
"grad_norm": 2.4341615932729357,
"learning_rate": 1.9982785415904063e-05,
"loss": 0.1937,
"step": 80
},
{
"epoch": 0.29,
"grad_norm": 5.231118772027732,
"learning_rate": 1.9981619007214675e-05,
"loss": 0.2214,
"step": 81
},
{
"epoch": 0.3,
"grad_norm": 13.460575468938803,
"learning_rate": 1.998041440326146e-05,
"loss": 0.2901,
"step": 82
},
{
"epoch": 0.3,
"grad_norm": 2.3535481254926514,
"learning_rate": 1.9979171608653926e-05,
"loss": 0.1895,
"step": 83
},
{
"epoch": 0.3,
"grad_norm": 5.095485056227556,
"learning_rate": 1.9977890628147684e-05,
"loss": 0.2148,
"step": 84
},
{
"epoch": 0.31,
"grad_norm": 11.074389998820713,
"learning_rate": 1.9976571466644493e-05,
"loss": 0.2083,
"step": 85
},
{
"epoch": 0.31,
"grad_norm": 7.453386007839937,
"learning_rate": 1.99752141291922e-05,
"loss": 0.2142,
"step": 86
},
{
"epoch": 0.32,
"grad_norm": 6.1609452514142085,
"learning_rate": 1.9973818620984737e-05,
"loss": 0.1934,
"step": 87
},
{
"epoch": 0.32,
"grad_norm": 4.1487715446979685,
"learning_rate": 1.99723849473621e-05,
"loss": 0.2345,
"step": 88
},
{
"epoch": 0.32,
"grad_norm": 3.2634858893508945,
"learning_rate": 1.9970913113810336e-05,
"loss": 0.1982,
"step": 89
},
{
"epoch": 0.33,
"grad_norm": 6.523213533930424,
"learning_rate": 1.996940312596149e-05,
"loss": 0.2248,
"step": 90
},
{
"epoch": 0.33,
"grad_norm": 5.773562252049304,
"learning_rate": 1.9967854989593634e-05,
"loss": 0.213,
"step": 91
},
{
"epoch": 0.33,
"grad_norm": 7.830843979970007,
"learning_rate": 1.9966268710630795e-05,
"loss": 0.2478,
"step": 92
},
{
"epoch": 0.34,
"grad_norm": 3.636412312338061,
"learning_rate": 1.996464429514297e-05,
"loss": 0.2259,
"step": 93
},
{
"epoch": 0.34,
"grad_norm": 8.644965516136683,
"learning_rate": 1.996298174934608e-05,
"loss": 0.1852,
"step": 94
},
{
"epoch": 0.34,
"grad_norm": 8.186163466631733,
"learning_rate": 1.9961281079601957e-05,
"loss": 0.2509,
"step": 95
},
{
"epoch": 0.35,
"grad_norm": 9.540579065302634,
"learning_rate": 1.9959542292418317e-05,
"loss": 0.199,
"step": 96
},
{
"epoch": 0.35,
"grad_norm": 1.8497364574995268,
"learning_rate": 1.995776539444873e-05,
"loss": 0.2237,
"step": 97
},
{
"epoch": 0.36,
"grad_norm": 2.3100586831605106,
"learning_rate": 1.9955950392492604e-05,
"loss": 0.2275,
"step": 98
},
{
"epoch": 0.36,
"grad_norm": 6.974566132033935,
"learning_rate": 1.9954097293495155e-05,
"loss": 0.2426,
"step": 99
},
{
"epoch": 0.36,
"grad_norm": 4.945067016519828,
"learning_rate": 1.9952206104547378e-05,
"loss": 0.212,
"step": 100
},
{
"epoch": 0.37,
"grad_norm": 5.347489899552784,
"learning_rate": 1.995027683288602e-05,
"loss": 0.1971,
"step": 101
},
{
"epoch": 0.37,
"grad_norm": 5.1269065904933235,
"learning_rate": 1.994830948589355e-05,
"loss": 0.2092,
"step": 102
},
{
"epoch": 0.37,
"grad_norm": 2.4384730018546996,
"learning_rate": 1.9946304071098143e-05,
"loss": 0.203,
"step": 103
},
{
"epoch": 0.38,
"grad_norm": 1.3363092433164303,
"learning_rate": 1.9944260596173642e-05,
"loss": 0.2092,
"step": 104
},
{
"epoch": 0.38,
"grad_norm": 2.4273522949310387,
"learning_rate": 1.994217906893952e-05,
"loss": 0.1938,
"step": 105
},
{
"epoch": 0.38,
"grad_norm": 5.938357565849595,
"learning_rate": 1.9940059497360874e-05,
"loss": 0.1935,
"step": 106
},
{
"epoch": 0.39,
"grad_norm": 5.912638673049391,
"learning_rate": 1.993790188954836e-05,
"loss": 0.2313,
"step": 107
},
{
"epoch": 0.39,
"grad_norm": 1.4222878204576312,
"learning_rate": 1.9935706253758206e-05,
"loss": 0.2192,
"step": 108
},
{
"epoch": 0.39,
"grad_norm": 1.4706157047642812,
"learning_rate": 1.993347259839214e-05,
"loss": 0.2038,
"step": 109
},
{
"epoch": 0.4,
"grad_norm": 2.552023589176243,
"learning_rate": 1.9931200931997372e-05,
"loss": 0.1944,
"step": 110
},
{
"epoch": 0.4,
"grad_norm": 2.6398901043384204,
"learning_rate": 1.9928891263266578e-05,
"loss": 0.1801,
"step": 111
},
{
"epoch": 0.41,
"grad_norm": 2.5704568250317315,
"learning_rate": 1.9926543601037843e-05,
"loss": 0.2268,
"step": 112
},
{
"epoch": 0.41,
"grad_norm": 3.1956913784627607,
"learning_rate": 1.992415795429463e-05,
"loss": 0.2252,
"step": 113
},
{
"epoch": 0.41,
"grad_norm": 2.5341556196077897,
"learning_rate": 1.992173433216577e-05,
"loss": 0.1716,
"step": 114
},
{
"epoch": 0.42,
"grad_norm": 2.9626316933204295,
"learning_rate": 1.9919272743925386e-05,
"loss": 0.187,
"step": 115
},
{
"epoch": 0.42,
"grad_norm": 3.7049021117634253,
"learning_rate": 1.99167731989929e-05,
"loss": 0.188,
"step": 116
},
{
"epoch": 0.42,
"grad_norm": 4.613297684884965,
"learning_rate": 1.9914235706932975e-05,
"loss": 0.1915,
"step": 117
},
{
"epoch": 0.43,
"grad_norm": 2.538398454776101,
"learning_rate": 1.9911660277455473e-05,
"loss": 0.1815,
"step": 118
},
{
"epoch": 0.43,
"grad_norm": 1.6517117154934629,
"learning_rate": 1.990904692041542e-05,
"loss": 0.2048,
"step": 119
},
{
"epoch": 0.43,
"grad_norm": 5.519157487836765,
"learning_rate": 1.9906395645813e-05,
"loss": 0.2435,
"step": 120
},
{
"epoch": 0.44,
"grad_norm": 3.551984088038686,
"learning_rate": 1.9903706463793463e-05,
"loss": 0.2155,
"step": 121
},
{
"epoch": 0.44,
"grad_norm": 2.2040044547253967,
"learning_rate": 1.990097938464713e-05,
"loss": 0.2238,
"step": 122
},
{
"epoch": 0.45,
"grad_norm": 1.410778671062537,
"learning_rate": 1.989821441880933e-05,
"loss": 0.1958,
"step": 123
},
{
"epoch": 0.45,
"grad_norm": 1.19869128373236,
"learning_rate": 1.989541157686037e-05,
"loss": 0.2028,
"step": 124
},
{
"epoch": 0.45,
"grad_norm": 1.536087636301242,
"learning_rate": 1.9892570869525495e-05,
"loss": 0.2,
"step": 125
},
{
"epoch": 0.46,
"grad_norm": 3.2379922220159085,
"learning_rate": 1.9889692307674847e-05,
"loss": 0.1823,
"step": 126
},
{
"epoch": 0.46,
"grad_norm": 2.7508465935428106,
"learning_rate": 1.9886775902323405e-05,
"loss": 0.2064,
"step": 127
},
{
"epoch": 0.46,
"grad_norm": 1.6835865398958427,
"learning_rate": 1.9883821664630977e-05,
"loss": 0.2229,
"step": 128
},
{
"epoch": 0.47,
"grad_norm": 3.0100393765483524,
"learning_rate": 1.988082960590213e-05,
"loss": 0.1546,
"step": 129
},
{
"epoch": 0.47,
"grad_norm": 2.3477760338260234,
"learning_rate": 1.987779973758615e-05,
"loss": 0.1789,
"step": 130
},
{
"epoch": 0.47,
"grad_norm": 5.458849610334373,
"learning_rate": 1.9874732071277015e-05,
"loss": 0.2424,
"step": 131
},
{
"epoch": 0.48,
"grad_norm": 1.3606318777601056,
"learning_rate": 1.987162661871333e-05,
"loss": 0.1741,
"step": 132
},
{
"epoch": 0.48,
"grad_norm": 2.909771599382989,
"learning_rate": 1.9868483391778303e-05,
"loss": 0.1713,
"step": 133
},
{
"epoch": 0.49,
"grad_norm": 2.139911494220399,
"learning_rate": 1.986530240249968e-05,
"loss": 0.1794,
"step": 134
},
{
"epoch": 0.49,
"grad_norm": 1.9818599307655007,
"learning_rate": 1.9862083663049698e-05,
"loss": 0.2265,
"step": 135
},
{
"epoch": 0.49,
"grad_norm": 1.8642280226326846,
"learning_rate": 1.985882718574506e-05,
"loss": 0.2317,
"step": 136
},
{
"epoch": 0.5,
"grad_norm": 1.4019059864946621,
"learning_rate": 1.9855532983046876e-05,
"loss": 0.1804,
"step": 137
},
{
"epoch": 0.5,
"grad_norm": 3.937781886771039,
"learning_rate": 1.9852201067560607e-05,
"loss": 0.1904,
"step": 138
},
{
"epoch": 0.5,
"grad_norm": 1.3537933164261284,
"learning_rate": 1.984883145203603e-05,
"loss": 0.1719,
"step": 139
},
{
"epoch": 0.51,
"grad_norm": 4.953870998813679,
"learning_rate": 1.984542414936718e-05,
"loss": 0.2321,
"step": 140
},
{
"epoch": 0.51,
"grad_norm": 3.778644025099447,
"learning_rate": 1.98419791725923e-05,
"loss": 0.2287,
"step": 141
},
{
"epoch": 0.51,
"grad_norm": 1.0978810240658883,
"learning_rate": 1.9838496534893807e-05,
"loss": 0.1946,
"step": 142
},
{
"epoch": 0.52,
"grad_norm": 2.4833931529998075,
"learning_rate": 1.983497624959822e-05,
"loss": 0.2447,
"step": 143
},
{
"epoch": 0.52,
"grad_norm": 0.9962594108181742,
"learning_rate": 1.9831418330176127e-05,
"loss": 0.1613,
"step": 144
},
{
"epoch": 0.53,
"grad_norm": 2.1099326800106075,
"learning_rate": 1.982782279024211e-05,
"loss": 0.1758,
"step": 145
},
{
"epoch": 0.53,
"grad_norm": 0.9917248634127067,
"learning_rate": 1.9824189643554724e-05,
"loss": 0.1832,
"step": 146
},
{
"epoch": 0.53,
"grad_norm": 1.9093376030904632,
"learning_rate": 1.9820518904016425e-05,
"loss": 0.2125,
"step": 147
},
{
"epoch": 0.54,
"grad_norm": 3.8284205946412198,
"learning_rate": 1.9816810585673515e-05,
"loss": 0.2274,
"step": 148
},
{
"epoch": 0.54,
"grad_norm": 1.3817574265830486,
"learning_rate": 1.9813064702716094e-05,
"loss": 0.1811,
"step": 149
},
{
"epoch": 0.54,
"grad_norm": 6.411906280970247,
"learning_rate": 1.9809281269478015e-05,
"loss": 0.1909,
"step": 150
},
{
"epoch": 0.55,
"grad_norm": 3.074349965257892,
"learning_rate": 1.9805460300436805e-05,
"loss": 0.2112,
"step": 151
},
{
"epoch": 0.55,
"grad_norm": 2.3421132125177087,
"learning_rate": 1.9801601810213634e-05,
"loss": 0.2584,
"step": 152
},
{
"epoch": 0.55,
"grad_norm": 1.4208905656674993,
"learning_rate": 1.9797705813573247e-05,
"loss": 0.1921,
"step": 153
},
{
"epoch": 0.56,
"grad_norm": 2.1939035088667658,
"learning_rate": 1.979377232542391e-05,
"loss": 0.1541,
"step": 154
},
{
"epoch": 0.56,
"grad_norm": 4.2384977607692,
"learning_rate": 1.9789801360817348e-05,
"loss": 0.1978,
"step": 155
},
{
"epoch": 0.57,
"grad_norm": 5.4439444275988365,
"learning_rate": 1.9785792934948697e-05,
"loss": 0.2153,
"step": 156
},
{
"epoch": 0.57,
"grad_norm": 1.2451798232206324,
"learning_rate": 1.9781747063156436e-05,
"loss": 0.177,
"step": 157
},
{
"epoch": 0.57,
"grad_norm": 2.967272591808535,
"learning_rate": 1.9777663760922342e-05,
"loss": 0.192,
"step": 158
},
{
"epoch": 0.58,
"grad_norm": 6.104903104955908,
"learning_rate": 1.977354304387141e-05,
"loss": 0.2066,
"step": 159
},
{
"epoch": 0.58,
"grad_norm": 1.8069115347552653,
"learning_rate": 1.976938492777182e-05,
"loss": 0.1982,
"step": 160
},
{
"epoch": 0.58,
"grad_norm": 2.7788359549191153,
"learning_rate": 1.976518942853484e-05,
"loss": 0.189,
"step": 161
},
{
"epoch": 0.59,
"grad_norm": 2.211094265354669,
"learning_rate": 1.9760956562214808e-05,
"loss": 0.1832,
"step": 162
},
{
"epoch": 0.59,
"grad_norm": 1.5547979175056448,
"learning_rate": 1.975668634500904e-05,
"loss": 0.1643,
"step": 163
},
{
"epoch": 0.59,
"grad_norm": 2.3883926579039336,
"learning_rate": 1.9752378793257777e-05,
"loss": 0.2382,
"step": 164
},
{
"epoch": 0.6,
"grad_norm": 1.7224510071144223,
"learning_rate": 1.9748033923444123e-05,
"loss": 0.1834,
"step": 165
},
{
"epoch": 0.6,
"grad_norm": 4.561364149201268,
"learning_rate": 1.9743651752193983e-05,
"loss": 0.2058,
"step": 166
},
{
"epoch": 0.61,
"grad_norm": 1.3829318882111994,
"learning_rate": 1.9739232296276004e-05,
"loss": 0.2104,
"step": 167
},
{
"epoch": 0.61,
"grad_norm": 1.4674065600010064,
"learning_rate": 1.9734775572601487e-05,
"loss": 0.1665,
"step": 168
},
{
"epoch": 0.61,
"grad_norm": 1.8431949486041035,
"learning_rate": 1.9730281598224366e-05,
"loss": 0.1685,
"step": 169
},
{
"epoch": 0.62,
"grad_norm": 3.7012831942032336,
"learning_rate": 1.9725750390341093e-05,
"loss": 0.2276,
"step": 170
},
{
"epoch": 0.62,
"grad_norm": 1.824165248665086,
"learning_rate": 1.9721181966290614e-05,
"loss": 0.1914,
"step": 171
},
{
"epoch": 0.62,
"grad_norm": 2.0423081016544904,
"learning_rate": 1.9716576343554274e-05,
"loss": 0.1748,
"step": 172
},
{
"epoch": 0.63,
"grad_norm": 1.879666187913567,
"learning_rate": 1.9711933539755764e-05,
"loss": 0.1746,
"step": 173
},
{
"epoch": 0.63,
"grad_norm": 2.6209642404840574,
"learning_rate": 1.9707253572661057e-05,
"loss": 0.1475,
"step": 174
},
{
"epoch": 0.63,
"grad_norm": 1.8343240393881313,
"learning_rate": 1.9702536460178318e-05,
"loss": 0.1903,
"step": 175
},
{
"epoch": 0.64,
"grad_norm": 1.513338761986029,
"learning_rate": 1.969778222035787e-05,
"loss": 0.1733,
"step": 176
},
{
"epoch": 0.64,
"grad_norm": 3.681664720505642,
"learning_rate": 1.969299087139209e-05,
"loss": 0.1774,
"step": 177
},
{
"epoch": 0.64,
"grad_norm": 2.371858975566763,
"learning_rate": 1.9688162431615367e-05,
"loss": 0.2187,
"step": 178
},
{
"epoch": 0.65,
"grad_norm": 3.1607291066632204,
"learning_rate": 1.9683296919504013e-05,
"loss": 0.2059,
"step": 179
},
{
"epoch": 0.65,
"grad_norm": 5.2653264740906485,
"learning_rate": 1.9678394353676203e-05,
"loss": 0.1524,
"step": 180
},
{
"epoch": 0.66,
"grad_norm": 2.3029091047043004,
"learning_rate": 1.9673454752891898e-05,
"loss": 0.159,
"step": 181
},
{
"epoch": 0.66,
"grad_norm": 1.6003395131617928,
"learning_rate": 1.9668478136052776e-05,
"loss": 0.1665,
"step": 182
},
{
"epoch": 0.66,
"grad_norm": 4.366100268481903,
"learning_rate": 1.9663464522202162e-05,
"loss": 0.162,
"step": 183
},
{
"epoch": 0.67,
"grad_norm": 4.787342810380197,
"learning_rate": 1.9658413930524955e-05,
"loss": 0.1807,
"step": 184
},
{
"epoch": 0.67,
"grad_norm": 2.349030904122291,
"learning_rate": 1.9653326380347532e-05,
"loss": 0.2119,
"step": 185
},
{
"epoch": 0.67,
"grad_norm": 2.6889625324614177,
"learning_rate": 1.9648201891137725e-05,
"loss": 0.1599,
"step": 186
},
{
"epoch": 0.68,
"grad_norm": 3.1263858368560586,
"learning_rate": 1.964304048250469e-05,
"loss": 0.1907,
"step": 187
},
{
"epoch": 0.68,
"grad_norm": 5.597521679201683,
"learning_rate": 1.963784217419887e-05,
"loss": 0.1747,
"step": 188
},
{
"epoch": 0.68,
"grad_norm": 2.3137721457903386,
"learning_rate": 1.96326069861119e-05,
"loss": 0.2071,
"step": 189
},
{
"epoch": 0.69,
"grad_norm": 2.4519505366650374,
"learning_rate": 1.9627334938276547e-05,
"loss": 0.1674,
"step": 190
},
{
"epoch": 0.69,
"grad_norm": 1.1148683625073919,
"learning_rate": 1.9622026050866613e-05,
"loss": 0.1467,
"step": 191
},
{
"epoch": 0.7,
"grad_norm": 3.2690170842798527,
"learning_rate": 1.961668034419688e-05,
"loss": 0.2129,
"step": 192
},
{
"epoch": 0.7,
"grad_norm": 2.619889210397982,
"learning_rate": 1.961129783872301e-05,
"loss": 0.1622,
"step": 193
},
{
"epoch": 0.7,
"grad_norm": 1.9136904827036365,
"learning_rate": 1.9605878555041484e-05,
"loss": 0.2006,
"step": 194
},
{
"epoch": 0.71,
"grad_norm": 1.20031747350585,
"learning_rate": 1.9600422513889515e-05,
"loss": 0.1566,
"step": 195
},
{
"epoch": 0.71,
"grad_norm": 2.7166231249244475,
"learning_rate": 1.9594929736144978e-05,
"loss": 0.2185,
"step": 196
},
{
"epoch": 0.71,
"grad_norm": 3.795589774468367,
"learning_rate": 1.9589400242826307e-05,
"loss": 0.1703,
"step": 197
},
{
"epoch": 0.72,
"grad_norm": 2.5740329013856154,
"learning_rate": 1.9583834055092446e-05,
"loss": 0.2231,
"step": 198
},
{
"epoch": 0.72,
"grad_norm": 3.5969245391305824,
"learning_rate": 1.9578231194242744e-05,
"loss": 0.183,
"step": 199
},
{
"epoch": 0.72,
"grad_norm": 1.0963714016675181,
"learning_rate": 1.9572591681716888e-05,
"loss": 0.175,
"step": 200
},
{
"epoch": 0.73,
"grad_norm": 3.1527309757053024,
"learning_rate": 1.9566915539094805e-05,
"loss": 0.1612,
"step": 201
},
{
"epoch": 0.73,
"grad_norm": 6.11802400882258,
"learning_rate": 1.95612027880966e-05,
"loss": 0.1739,
"step": 202
},
{
"epoch": 0.74,
"grad_norm": 2.1003663250353997,
"learning_rate": 1.9555453450582453e-05,
"loss": 0.24,
"step": 203
},
{
"epoch": 0.74,
"grad_norm": 3.1350893178704067,
"learning_rate": 1.9549667548552557e-05,
"loss": 0.2067,
"step": 204
},
{
"epoch": 0.74,
"grad_norm": 2.4276457497833874,
"learning_rate": 1.9543845104147e-05,
"loss": 0.1765,
"step": 205
},
{
"epoch": 0.75,
"grad_norm": 10.165213308146818,
"learning_rate": 1.9537986139645724e-05,
"loss": 0.223,
"step": 206
},
{
"epoch": 0.75,
"grad_norm": 5.441589230972257,
"learning_rate": 1.9532090677468408e-05,
"loss": 0.2,
"step": 207
},
{
"epoch": 0.75,
"grad_norm": 2.18549945843444,
"learning_rate": 1.9526158740174392e-05,
"loss": 0.1921,
"step": 208
},
{
"epoch": 0.76,
"grad_norm": 2.9825782386080286,
"learning_rate": 1.9520190350462585e-05,
"loss": 0.2034,
"step": 209
},
{
"epoch": 0.76,
"grad_norm": 3.3463873892037093,
"learning_rate": 1.951418553117139e-05,
"loss": 0.1935,
"step": 210
},
{
"epoch": 0.76,
"grad_norm": 5.385084120106734,
"learning_rate": 1.950814430527861e-05,
"loss": 0.2351,
"step": 211
},
{
"epoch": 0.77,
"grad_norm": 2.8919707500813345,
"learning_rate": 1.950206669590136e-05,
"loss": 0.1415,
"step": 212
},
{
"epoch": 0.77,
"grad_norm": 2.3002484179864218,
"learning_rate": 1.949595272629597e-05,
"loss": 0.2166,
"step": 213
},
{
"epoch": 0.78,
"grad_norm": 1.1625160748683374,
"learning_rate": 1.9489802419857918e-05,
"loss": 0.1766,
"step": 214
},
{
"epoch": 0.78,
"grad_norm": 3.619701192742547,
"learning_rate": 1.9483615800121717e-05,
"loss": 0.1698,
"step": 215
},
{
"epoch": 0.78,
"grad_norm": 6.1829379252643495,
"learning_rate": 1.947739289076084e-05,
"loss": 0.142,
"step": 216
},
{
"epoch": 0.79,
"grad_norm": 2.766727320600325,
"learning_rate": 1.947113371558762e-05,
"loss": 0.1819,
"step": 217
},
{
"epoch": 0.79,
"grad_norm": 8.399502285949815,
"learning_rate": 1.9464838298553172e-05,
"loss": 0.2344,
"step": 218
},
{
"epoch": 0.79,
"grad_norm": 5.110766225428027,
"learning_rate": 1.9458506663747286e-05,
"loss": 0.2011,
"step": 219
},
{
"epoch": 0.8,
"grad_norm": 4.051592837631265,
"learning_rate": 1.9452138835398333e-05,
"loss": 0.1914,
"step": 220
},
{
"epoch": 0.8,
"grad_norm": 2.3725438569751955,
"learning_rate": 1.9445734837873203e-05,
"loss": 0.1765,
"step": 221
},
{
"epoch": 0.8,
"grad_norm": 2.1705247736939817,
"learning_rate": 1.9439294695677168e-05,
"loss": 0.1807,
"step": 222
},
{
"epoch": 0.81,
"grad_norm": 3.3017606951651333,
"learning_rate": 1.943281843345382e-05,
"loss": 0.1905,
"step": 223
},
{
"epoch": 0.81,
"grad_norm": 1.4309126557975618,
"learning_rate": 1.9426306075984968e-05,
"loss": 0.1944,
"step": 224
},
{
"epoch": 0.82,
"grad_norm": 1.9415676897308591,
"learning_rate": 1.9419757648190532e-05,
"loss": 0.1838,
"step": 225
},
{
"epoch": 0.82,
"grad_norm": 1.3799577712400077,
"learning_rate": 1.9413173175128472e-05,
"loss": 0.1506,
"step": 226
},
{
"epoch": 0.82,
"grad_norm": 3.270902216094506,
"learning_rate": 1.9406552681994663e-05,
"loss": 0.1633,
"step": 227
},
{
"epoch": 0.83,
"grad_norm": 5.684043789647969,
"learning_rate": 1.9399896194122824e-05,
"loss": 0.2286,
"step": 228
},
{
"epoch": 0.83,
"grad_norm": 2.708958534514703,
"learning_rate": 1.93932037369844e-05,
"loss": 0.1855,
"step": 229
},
{
"epoch": 0.83,
"grad_norm": 1.7125886875419585,
"learning_rate": 1.9386475336188484e-05,
"loss": 0.1873,
"step": 230
},
{
"epoch": 0.84,
"grad_norm": 1.8059962309471966,
"learning_rate": 1.9379711017481703e-05,
"loss": 0.1835,
"step": 231
},
{
"epoch": 0.84,
"grad_norm": 1.9096318404555614,
"learning_rate": 1.9372910806748124e-05,
"loss": 0.1928,
"step": 232
},
{
"epoch": 0.84,
"grad_norm": 1.6880354115265441,
"learning_rate": 1.936607473000917e-05,
"loss": 0.2217,
"step": 233
},
{
"epoch": 0.85,
"grad_norm": 4.590097931857785,
"learning_rate": 1.935920281342349e-05,
"loss": 0.1751,
"step": 234
},
{
"epoch": 0.85,
"grad_norm": 3.3170870647428825,
"learning_rate": 1.9352295083286897e-05,
"loss": 0.1923,
"step": 235
},
{
"epoch": 0.86,
"grad_norm": 3.399731385689797,
"learning_rate": 1.934535156603222e-05,
"loss": 0.1687,
"step": 236
},
{
"epoch": 0.86,
"grad_norm": 1.879018288953777,
"learning_rate": 1.9338372288229253e-05,
"loss": 0.1754,
"step": 237
},
{
"epoch": 0.86,
"grad_norm": 1.7500404375244818,
"learning_rate": 1.933135727658462e-05,
"loss": 0.1724,
"step": 238
},
{
"epoch": 0.87,
"grad_norm": 2.4685486773525307,
"learning_rate": 1.9324306557941684e-05,
"loss": 0.2298,
"step": 239
},
{
"epoch": 0.87,
"grad_norm": 3.93522263930692,
"learning_rate": 1.931722015928044e-05,
"loss": 0.1679,
"step": 240
},
{
"epoch": 0.87,
"grad_norm": 2.618587281781777,
"learning_rate": 1.9310098107717418e-05,
"loss": 0.2414,
"step": 241
},
{
"epoch": 0.88,
"grad_norm": 2.6014521447134005,
"learning_rate": 1.930294043050558e-05,
"loss": 0.2271,
"step": 242
},
{
"epoch": 0.88,
"grad_norm": 1.7444357606743905,
"learning_rate": 1.9295747155034203e-05,
"loss": 0.1611,
"step": 243
},
{
"epoch": 0.88,
"grad_norm": 3.90614869544953,
"learning_rate": 1.928851830882879e-05,
"loss": 0.1621,
"step": 244
},
{
"epoch": 0.89,
"grad_norm": 2.2866019796864734,
"learning_rate": 1.928125391955095e-05,
"loss": 0.2127,
"step": 245
},
{
"epoch": 0.89,
"grad_norm": 2.4492994823583496,
"learning_rate": 1.9273954014998307e-05,
"loss": 0.1873,
"step": 246
},
{
"epoch": 0.89,
"grad_norm": 2.4875080022090086,
"learning_rate": 1.9266618623104384e-05,
"loss": 0.1898,
"step": 247
},
{
"epoch": 0.9,
"grad_norm": 5.028262963276311,
"learning_rate": 1.92592477719385e-05,
"loss": 0.228,
"step": 248
},
{
"epoch": 0.9,
"grad_norm": 3.682312273260727,
"learning_rate": 1.9251841489705657e-05,
"loss": 0.2226,
"step": 249
},
{
"epoch": 0.91,
"grad_norm": 2.1022001680261764,
"learning_rate": 1.9244399804746436e-05,
"loss": 0.1901,
"step": 250
},
{
"epoch": 0.91,
"grad_norm": 4.329486372814356,
"learning_rate": 1.923692274553689e-05,
"loss": 0.2105,
"step": 251
},
{
"epoch": 0.91,
"grad_norm": 4.806818514585948,
"learning_rate": 1.9229410340688442e-05,
"loss": 0.2075,
"step": 252
},
{
"epoch": 0.92,
"grad_norm": 4.495243683594372,
"learning_rate": 1.922186261894775e-05,
"loss": 0.2092,
"step": 253
},
{
"epoch": 0.92,
"grad_norm": 1.075041098782442,
"learning_rate": 1.9214279609196632e-05,
"loss": 0.1941,
"step": 254
},
{
"epoch": 0.92,
"grad_norm": 1.0341228117597683,
"learning_rate": 1.9206661340451927e-05,
"loss": 0.1555,
"step": 255
},
{
"epoch": 0.93,
"grad_norm": 2.1119610276967546,
"learning_rate": 1.9199007841865395e-05,
"loss": 0.1792,
"step": 256
},
{
"epoch": 0.93,
"grad_norm": 2.930768434457091,
"learning_rate": 1.919131914272361e-05,
"loss": 0.1916,
"step": 257
},
{
"epoch": 0.93,
"grad_norm": 6.188828842690194,
"learning_rate": 1.9183595272447843e-05,
"loss": 0.2002,
"step": 258
},
{
"epoch": 0.94,
"grad_norm": 5.415222515014791,
"learning_rate": 1.9175836260593937e-05,
"loss": 0.2208,
"step": 259
},
{
"epoch": 0.94,
"grad_norm": 2.4738850204790706,
"learning_rate": 1.9168042136852228e-05,
"loss": 0.1406,
"step": 260
},
{
"epoch": 0.95,
"grad_norm": 5.329131640007567,
"learning_rate": 1.916021293104739e-05,
"loss": 0.2008,
"step": 261
},
{
"epoch": 0.95,
"grad_norm": 2.3257765001120143,
"learning_rate": 1.9152348673138355e-05,
"loss": 0.1554,
"step": 262
},
{
"epoch": 0.95,
"grad_norm": 2.693530182633664,
"learning_rate": 1.914444939321817e-05,
"loss": 0.1566,
"step": 263
},
{
"epoch": 0.96,
"grad_norm": 3.245725077616388,
"learning_rate": 1.913651512151391e-05,
"loss": 0.1848,
"step": 264
},
{
"epoch": 0.96,
"grad_norm": 1.9134572204267684,
"learning_rate": 1.9128545888386537e-05,
"loss": 0.1812,
"step": 265
},
{
"epoch": 0.96,
"grad_norm": 3.4660094621483957,
"learning_rate": 1.9120541724330802e-05,
"loss": 0.2319,
"step": 266
},
{
"epoch": 0.97,
"grad_norm": 6.2377157513624635,
"learning_rate": 1.9112502659975122e-05,
"loss": 0.2421,
"step": 267
},
{
"epoch": 0.97,
"grad_norm": 2.2389817823232567,
"learning_rate": 1.910442872608145e-05,
"loss": 0.1975,
"step": 268
},
{
"epoch": 0.97,
"grad_norm": 1.7644043566808019,
"learning_rate": 1.9096319953545186e-05,
"loss": 0.143,
"step": 269
},
{
"epoch": 0.98,
"grad_norm": 1.6256754670976832,
"learning_rate": 1.908817637339503e-05,
"loss": 0.1733,
"step": 270
},
{
"epoch": 0.98,
"grad_norm": 2.8315145141270555,
"learning_rate": 1.9079998016792884e-05,
"loss": 0.1925,
"step": 271
},
{
"epoch": 0.99,
"grad_norm": 2.404672759197368,
"learning_rate": 1.9071784915033717e-05,
"loss": 0.1981,
"step": 272
},
{
"epoch": 0.99,
"grad_norm": 5.141620203041048,
"learning_rate": 1.9063537099545456e-05,
"loss": 0.2028,
"step": 273
},
{
"epoch": 0.99,
"grad_norm": 1.3122940568496484,
"learning_rate": 1.9055254601888867e-05,
"loss": 0.1891,
"step": 274
},
{
"epoch": 1.0,
"grad_norm": 2.8860834704818434,
"learning_rate": 1.9046937453757413e-05,
"loss": 0.2066,
"step": 275
},
{
"epoch": 1.0,
"grad_norm": 1.474444058656227,
"learning_rate": 1.9038585686977168e-05,
"loss": 0.1903,
"step": 276
},
{
"epoch": 1.0,
"grad_norm": 6.220834314967025,
"learning_rate": 1.9030199333506667e-05,
"loss": 0.2024,
"step": 277
},
{
"epoch": 1.01,
"grad_norm": 1.4267945413252041,
"learning_rate": 1.9021778425436797e-05,
"loss": 0.1615,
"step": 278
},
{
"epoch": 1.01,
"grad_norm": 1.7879083169277576,
"learning_rate": 1.9013322994990663e-05,
"loss": 0.1873,
"step": 279
},
{
"epoch": 1.01,
"grad_norm": 3.1195876950952917,
"learning_rate": 1.9004833074523478e-05,
"loss": 0.2165,
"step": 280
},
{
"epoch": 1.02,
"grad_norm": 4.854144089703857,
"learning_rate": 1.8996308696522435e-05,
"loss": 0.1893,
"step": 281
},
{
"epoch": 1.02,
"grad_norm": 3.70882944826412,
"learning_rate": 1.8987749893606575e-05,
"loss": 0.2176,
"step": 282
},
{
"epoch": 1.03,
"grad_norm": 3.7796610032532536,
"learning_rate": 1.897915669852667e-05,
"loss": 0.2176,
"step": 283
},
{
"epoch": 1.03,
"grad_norm": 1.9855057847958348,
"learning_rate": 1.8970529144165103e-05,
"loss": 0.2027,
"step": 284
},
{
"epoch": 1.03,
"grad_norm": 0.9758605320008039,
"learning_rate": 1.8961867263535716e-05,
"loss": 0.1694,
"step": 285
},
{
"epoch": 1.04,
"grad_norm": 5.102642644944852,
"learning_rate": 1.8953171089783725e-05,
"loss": 0.1985,
"step": 286
},
{
"epoch": 1.04,
"grad_norm": 3.2543005950166566,
"learning_rate": 1.8944440656185556e-05,
"loss": 0.1696,
"step": 287
},
{
"epoch": 1.04,
"grad_norm": 3.5584636258947406,
"learning_rate": 1.8935675996148738e-05,
"loss": 0.183,
"step": 288
},
{
"epoch": 1.05,
"grad_norm": 2.407017198203034,
"learning_rate": 1.892687714321177e-05,
"loss": 0.1962,
"step": 289
},
{
"epoch": 1.05,
"grad_norm": 4.043682911740225,
"learning_rate": 1.8918044131043987e-05,
"loss": 0.1702,
"step": 290
},
{
"epoch": 1.05,
"grad_norm": 3.6780900744494582,
"learning_rate": 1.890917699344544e-05,
"loss": 0.2027,
"step": 291
},
{
"epoch": 1.06,
"grad_norm": 5.164665837636065,
"learning_rate": 1.890027576434677e-05,
"loss": 0.1587,
"step": 292
},
{
"epoch": 1.06,
"grad_norm": 1.1398407090298532,
"learning_rate": 1.8891340477809055e-05,
"loss": 0.1847,
"step": 293
},
{
"epoch": 1.07,
"grad_norm": 1.852078257346115,
"learning_rate": 1.8882371168023708e-05,
"loss": 0.143,
"step": 294
},
{
"epoch": 1.07,
"grad_norm": 3.6919711611156467,
"learning_rate": 1.887336786931233e-05,
"loss": 0.2032,
"step": 295
},
{
"epoch": 1.07,
"grad_norm": 4.556863118840815,
"learning_rate": 1.8864330616126586e-05,
"loss": 0.217,
"step": 296
},
{
"epoch": 1.08,
"grad_norm": 3.3765282389059856,
"learning_rate": 1.8855259443048067e-05,
"loss": 0.1626,
"step": 297
},
{
"epoch": 1.08,
"grad_norm": 2.5829969852710155,
"learning_rate": 1.8846154384788162e-05,
"loss": 0.1788,
"step": 298
},
{
"epoch": 1.08,
"grad_norm": 5.075805270867687,
"learning_rate": 1.8837015476187917e-05,
"loss": 0.2225,
"step": 299
},
{
"epoch": 1.09,
"grad_norm": 1.8880177157758162,
"learning_rate": 1.8827842752217917e-05,
"loss": 0.1716,
"step": 300
},
{
"epoch": 1.09,
"grad_norm": 1.1521649968736736,
"learning_rate": 1.8818636247978146e-05,
"loss": 0.1715,
"step": 301
},
{
"epoch": 1.09,
"grad_norm": 1.9626798729665158,
"learning_rate": 1.8809395998697835e-05,
"loss": 0.2013,
"step": 302
},
{
"epoch": 1.1,
"grad_norm": 3.058369631517267,
"learning_rate": 1.880012203973536e-05,
"loss": 0.1943,
"step": 303
},
{
"epoch": 1.1,
"grad_norm": 5.488270353933477,
"learning_rate": 1.8790814406578073e-05,
"loss": 0.1833,
"step": 304
},
{
"epoch": 1.11,
"grad_norm": 4.685311512196503,
"learning_rate": 1.87814731348422e-05,
"loss": 0.2316,
"step": 305
},
{
"epoch": 1.11,
"grad_norm": 3.7320384197766616,
"learning_rate": 1.877209826027267e-05,
"loss": 0.1603,
"step": 306
},
{
"epoch": 1.11,
"grad_norm": 1.6481891382723979,
"learning_rate": 1.8762689818743008e-05,
"loss": 0.1622,
"step": 307
},
{
"epoch": 1.12,
"grad_norm": 5.644998190465421,
"learning_rate": 1.8753247846255175e-05,
"loss": 0.1855,
"step": 308
},
{
"epoch": 1.12,
"grad_norm": 2.2321393392076536,
"learning_rate": 1.874377237893945e-05,
"loss": 0.1754,
"step": 309
},
{
"epoch": 1.12,
"grad_norm": 2.8834280167969224,
"learning_rate": 1.8734263453054274e-05,
"loss": 0.1835,
"step": 310
},
{
"epoch": 1.13,
"grad_norm": 5.492233853467942,
"learning_rate": 1.8724721104986123e-05,
"loss": 0.2232,
"step": 311
},
{
"epoch": 1.13,
"grad_norm": 1.4242250524390119,
"learning_rate": 1.871514537124936e-05,
"loss": 0.2018,
"step": 312
},
{
"epoch": 1.13,
"grad_norm": 1.1249221000549425,
"learning_rate": 1.8705536288486118e-05,
"loss": 0.1849,
"step": 313
},
{
"epoch": 1.14,
"grad_norm": 2.0843956925311917,
"learning_rate": 1.869589389346611e-05,
"loss": 0.1728,
"step": 314
},
{
"epoch": 1.14,
"grad_norm": 1.785800054399146,
"learning_rate": 1.868621822308655e-05,
"loss": 0.2017,
"step": 315
},
{
"epoch": 1.14,
"grad_norm": 4.249640134708495,
"learning_rate": 1.8676509314371977e-05,
"loss": 0.2105,
"step": 316
},
{
"epoch": 1.15,
"grad_norm": 1.511072289018074,
"learning_rate": 1.8666767204474094e-05,
"loss": 0.1906,
"step": 317
},
{
"epoch": 1.15,
"grad_norm": 1.8096550921548866,
"learning_rate": 1.8656991930671687e-05,
"loss": 0.1985,
"step": 318
},
{
"epoch": 1.16,
"grad_norm": 1.7248204418661588,
"learning_rate": 1.8647183530370415e-05,
"loss": 0.1934,
"step": 319
},
{
"epoch": 1.16,
"grad_norm": 1.1999611680627995,
"learning_rate": 1.863734204110272e-05,
"loss": 0.1963,
"step": 320
},
{
"epoch": 1.16,
"grad_norm": 3.883573530691994,
"learning_rate": 1.862746750052764e-05,
"loss": 0.1542,
"step": 321
},
{
"epoch": 1.17,
"grad_norm": 3.3431228063402654,
"learning_rate": 1.861755994643071e-05,
"loss": 0.1942,
"step": 322
},
{
"epoch": 1.17,
"grad_norm": 2.4330036856731683,
"learning_rate": 1.8607619416723767e-05,
"loss": 0.1424,
"step": 323
},
{
"epoch": 1.17,
"grad_norm": 1.5436726427593486,
"learning_rate": 1.859764594944485e-05,
"loss": 0.1894,
"step": 324
},
{
"epoch": 1.18,
"grad_norm": 1.9100137445854823,
"learning_rate": 1.8587639582758032e-05,
"loss": 0.1626,
"step": 325
},
{
"epoch": 1.18,
"grad_norm": 1.7635046945827246,
"learning_rate": 1.8577600354953273e-05,
"loss": 0.1338,
"step": 326
},
{
"epoch": 1.18,
"grad_norm": 1.901137989162943,
"learning_rate": 1.856752830444628e-05,
"loss": 0.1627,
"step": 327
},
{
"epoch": 1.19,
"grad_norm": 1.907571641908837,
"learning_rate": 1.8557423469778356e-05,
"loss": 0.2286,
"step": 328
},
{
"epoch": 1.19,
"grad_norm": 2.6357641602746575,
"learning_rate": 1.854728588961626e-05,
"loss": 0.1862,
"step": 329
},
{
"epoch": 1.2,
"grad_norm": 1.2297582716959188,
"learning_rate": 1.8537115602752054e-05,
"loss": 0.1598,
"step": 330
},
{
"epoch": 1.2,
"grad_norm": 2.756509393456029,
"learning_rate": 1.8526912648102943e-05,
"loss": 0.1943,
"step": 331
},
{
"epoch": 1.2,
"grad_norm": 1.9929307269781298,
"learning_rate": 1.851667706471115e-05,
"loss": 0.1716,
"step": 332
},
{
"epoch": 1.21,
"grad_norm": 2.4494045336334973,
"learning_rate": 1.850640889174375e-05,
"loss": 0.1869,
"step": 333
},
{
"epoch": 1.21,
"grad_norm": 2.3399930651589775,
"learning_rate": 1.8496108168492518e-05,
"loss": 0.148,
"step": 334
},
{
"epoch": 1.21,
"grad_norm": 3.2799543214110143,
"learning_rate": 1.848577493437379e-05,
"loss": 0.2033,
"step": 335
},
{
"epoch": 1.22,
"grad_norm": 2.1138964961618587,
"learning_rate": 1.8475409228928314e-05,
"loss": 0.1749,
"step": 336
},
{
"epoch": 1.22,
"grad_norm": 4.242446831879502,
"learning_rate": 1.8465011091821072e-05,
"loss": 0.2122,
"step": 337
},
{
"epoch": 1.22,
"grad_norm": 4.695309052803132,
"learning_rate": 1.8454580562841165e-05,
"loss": 0.2316,
"step": 338
},
{
"epoch": 1.23,
"grad_norm": 2.435856865520849,
"learning_rate": 1.844411768190164e-05,
"loss": 0.203,
"step": 339
},
{
"epoch": 1.23,
"grad_norm": 1.9001456672745267,
"learning_rate": 1.8433622489039333e-05,
"loss": 0.1451,
"step": 340
},
{
"epoch": 1.24,
"grad_norm": 4.549439234979374,
"learning_rate": 1.842309502441473e-05,
"loss": 0.2038,
"step": 341
},
{
"epoch": 1.24,
"grad_norm": 4.385192686383965,
"learning_rate": 1.8412535328311813e-05,
"loss": 0.1586,
"step": 342
},
{
"epoch": 1.24,
"grad_norm": 1.7513922789924818,
"learning_rate": 1.8401943441137885e-05,
"loss": 0.168,
"step": 343
},
{
"epoch": 1.25,
"grad_norm": 1.9285666796307979,
"learning_rate": 1.839131940342344e-05,
"loss": 0.1634,
"step": 344
},
{
"epoch": 1.25,
"grad_norm": 1.224049868781548,
"learning_rate": 1.8380663255821996e-05,
"loss": 0.1359,
"step": 345
},
{
"epoch": 1.25,
"grad_norm": 1.4503345293569696,
"learning_rate": 1.8369975039109937e-05,
"loss": 0.163,
"step": 346
},
{
"epoch": 1.26,
"grad_norm": 1.3812989577289319,
"learning_rate": 1.8359254794186368e-05,
"loss": 0.1529,
"step": 347
},
{
"epoch": 1.26,
"grad_norm": 1.5773982243086926,
"learning_rate": 1.8348502562072955e-05,
"loss": 0.1815,
"step": 348
},
{
"epoch": 1.26,
"grad_norm": 2.6216961252274693,
"learning_rate": 1.833771838391375e-05,
"loss": 0.1544,
"step": 349
},
{
"epoch": 1.27,
"grad_norm": 1.6715459139127302,
"learning_rate": 1.8326902300975063e-05,
"loss": 0.1321,
"step": 350
},
{
"epoch": 1.27,
"grad_norm": 1.9867901775532317,
"learning_rate": 1.831605435464528e-05,
"loss": 0.1791,
"step": 351
},
{
"epoch": 1.28,
"grad_norm": 4.5435987253093,
"learning_rate": 1.8305174586434724e-05,
"loss": 0.1515,
"step": 352
},
{
"epoch": 1.28,
"grad_norm": 2.591922944422485,
"learning_rate": 1.8294263037975476e-05,
"loss": 0.1705,
"step": 353
},
{
"epoch": 1.28,
"grad_norm": 6.804612650593382,
"learning_rate": 1.828331975102123e-05,
"loss": 0.2053,
"step": 354
},
{
"epoch": 1.29,
"grad_norm": 4.489415156525158,
"learning_rate": 1.8272344767447136e-05,
"loss": 0.1113,
"step": 355
},
{
"epoch": 1.29,
"grad_norm": 1.980091389672242,
"learning_rate": 1.8261338129249623e-05,
"loss": 0.1738,
"step": 356
},
{
"epoch": 1.29,
"grad_norm": 3.5329647835174973,
"learning_rate": 1.8250299878546247e-05,
"loss": 0.1633,
"step": 357
},
{
"epoch": 1.3,
"grad_norm": 9.669322574297373,
"learning_rate": 1.8239230057575542e-05,
"loss": 0.2105,
"step": 358
},
{
"epoch": 1.3,
"grad_norm": 4.629218320442219,
"learning_rate": 1.8228128708696843e-05,
"loss": 0.1475,
"step": 359
},
{
"epoch": 1.3,
"grad_norm": 4.2683344830305066,
"learning_rate": 1.8216995874390128e-05,
"loss": 0.1766,
"step": 360
},
{
"epoch": 1.31,
"grad_norm": 5.259612939815896,
"learning_rate": 1.820583159725585e-05,
"loss": 0.1653,
"step": 361
},
{
"epoch": 1.31,
"grad_norm": 7.874360229733074,
"learning_rate": 1.819463592001479e-05,
"loss": 0.2051,
"step": 362
},
{
"epoch": 1.32,
"grad_norm": 12.290094203185955,
"learning_rate": 1.8183408885507875e-05,
"loss": 0.2252,
"step": 363
},
{
"epoch": 1.32,
"grad_norm": 4.16333993532805,
"learning_rate": 1.817215053669603e-05,
"loss": 0.2108,
"step": 364
},
{
"epoch": 1.32,
"grad_norm": 3.0903366930930707,
"learning_rate": 1.8160860916659992e-05,
"loss": 0.1633,
"step": 365
},
{
"epoch": 1.33,
"grad_norm": 3.7314448836454175,
"learning_rate": 1.814954006860018e-05,
"loss": 0.1782,
"step": 366
},
{
"epoch": 1.33,
"grad_norm": 4.894045709612284,
"learning_rate": 1.81381880358365e-05,
"loss": 0.1879,
"step": 367
},
{
"epoch": 1.33,
"grad_norm": 8.950871898351894,
"learning_rate": 1.8126804861808175e-05,
"loss": 0.244,
"step": 368
},
{
"epoch": 1.34,
"grad_norm": 8.935243161337477,
"learning_rate": 1.8115390590073612e-05,
"loss": 0.2373,
"step": 369
},
{
"epoch": 1.34,
"grad_norm": 1.0892858965142254,
"learning_rate": 1.81039452643102e-05,
"loss": 0.1375,
"step": 370
},
{
"epoch": 1.34,
"grad_norm": 1.515794039091583,
"learning_rate": 1.8092468928314174e-05,
"loss": 0.1906,
"step": 371
},
{
"epoch": 1.35,
"grad_norm": 6.421281762051045,
"learning_rate": 1.808096162600041e-05,
"loss": 0.1788,
"step": 372
},
{
"epoch": 1.35,
"grad_norm": 5.868435345616694,
"learning_rate": 1.80694234014023e-05,
"loss": 0.2106,
"step": 373
},
{
"epoch": 1.36,
"grad_norm": 6.43009286592627,
"learning_rate": 1.8057854298671545e-05,
"loss": 0.2174,
"step": 374
},
{
"epoch": 1.36,
"grad_norm": 1.8999377814562108,
"learning_rate": 1.804625436207802e-05,
"loss": 0.2107,
"step": 375
},
{
"epoch": 1.36,
"grad_norm": 1.4891862257518893,
"learning_rate": 1.803462363600957e-05,
"loss": 0.1765,
"step": 376
},
{
"epoch": 1.37,
"grad_norm": 2.8148290153185176,
"learning_rate": 1.8022962164971867e-05,
"loss": 0.1718,
"step": 377
},
{
"epoch": 1.37,
"grad_norm": 5.283593621364449,
"learning_rate": 1.8011269993588234e-05,
"loss": 0.2016,
"step": 378
},
{
"epoch": 1.37,
"grad_norm": 2.7240607903837484,
"learning_rate": 1.799954716659946e-05,
"loss": 0.1616,
"step": 379
},
{
"epoch": 1.38,
"grad_norm": 7.249514152232641,
"learning_rate": 1.798779372886365e-05,
"loss": 0.2167,
"step": 380
},
{
"epoch": 1.38,
"grad_norm": 4.823177269746868,
"learning_rate": 1.7976009725356038e-05,
"loss": 0.1705,
"step": 381
},
{
"epoch": 1.38,
"grad_norm": 2.0737152931895473,
"learning_rate": 1.796419520116882e-05,
"loss": 0.1362,
"step": 382
},
{
"epoch": 1.39,
"grad_norm": 1.799994738461166,
"learning_rate": 1.795235020151098e-05,
"loss": 0.1902,
"step": 383
},
{
"epoch": 1.39,
"grad_norm": 3.154863855560098,
"learning_rate": 1.7940474771708118e-05,
"loss": 0.1785,
"step": 384
},
{
"epoch": 1.39,
"grad_norm": 7.260410750937351,
"learning_rate": 1.792856895720228e-05,
"loss": 0.1899,
"step": 385
},
{
"epoch": 1.4,
"grad_norm": 3.789354975114773,
"learning_rate": 1.791663280355178e-05,
"loss": 0.1398,
"step": 386
},
{
"epoch": 1.4,
"grad_norm": 2.471019931110548,
"learning_rate": 1.790466635643103e-05,
"loss": 0.1435,
"step": 387
},
{
"epoch": 1.41,
"grad_norm": 2.7578364315313038,
"learning_rate": 1.789266966163035e-05,
"loss": 0.2226,
"step": 388
},
{
"epoch": 1.41,
"grad_norm": 5.486957706332273,
"learning_rate": 1.7880642765055816e-05,
"loss": 0.2131,
"step": 389
},
{
"epoch": 1.41,
"grad_norm": 5.818537789023371,
"learning_rate": 1.786858571272907e-05,
"loss": 0.1607,
"step": 390
},
{
"epoch": 1.42,
"grad_norm": 2.2136227115170777,
"learning_rate": 1.7856498550787144e-05,
"loss": 0.1343,
"step": 391
},
{
"epoch": 1.42,
"grad_norm": 1.5229015464461961,
"learning_rate": 1.7844381325482293e-05,
"loss": 0.1439,
"step": 392
},
{
"epoch": 1.42,
"grad_norm": 3.7097316114681247,
"learning_rate": 1.7832234083181798e-05,
"loss": 0.1705,
"step": 393
},
{
"epoch": 1.43,
"grad_norm": 4.142154874900152,
"learning_rate": 1.7820056870367813e-05,
"loss": 0.1675,
"step": 394
},
{
"epoch": 1.43,
"grad_norm": 4.856745555144273,
"learning_rate": 1.7807849733637177e-05,
"loss": 0.1743,
"step": 395
},
{
"epoch": 1.43,
"grad_norm": 2.2781705347105574,
"learning_rate": 1.7795612719701228e-05,
"loss": 0.2098,
"step": 396
},
{
"epoch": 1.44,
"grad_norm": 1.6131174517225353,
"learning_rate": 1.7783345875385636e-05,
"loss": 0.1858,
"step": 397
},
{
"epoch": 1.44,
"grad_norm": 1.3350288472401863,
"learning_rate": 1.7771049247630215e-05,
"loss": 0.1797,
"step": 398
},
{
"epoch": 1.45,
"grad_norm": 2.2632289769561353,
"learning_rate": 1.7758722883488747e-05,
"loss": 0.1883,
"step": 399
},
{
"epoch": 1.45,
"grad_norm": 2.7542308866562655,
"learning_rate": 1.7746366830128803e-05,
"loss": 0.183,
"step": 400
},
{
"epoch": 1.45,
"grad_norm": 2.7123868563789086,
"learning_rate": 1.773398113483157e-05,
"loss": 0.1631,
"step": 401
},
{
"epoch": 1.46,
"grad_norm": 1.1534890362072965,
"learning_rate": 1.7721565844991643e-05,
"loss": 0.1599,
"step": 402
},
{
"epoch": 1.46,
"grad_norm": 1.164814912872314,
"learning_rate": 1.7709121008116883e-05,
"loss": 0.1851,
"step": 403
},
{
"epoch": 1.46,
"grad_norm": 2.0030157197464,
"learning_rate": 1.76966466718282e-05,
"loss": 0.1942,
"step": 404
},
{
"epoch": 1.47,
"grad_norm": 3.488435796832804,
"learning_rate": 1.768414288385939e-05,
"loss": 0.1327,
"step": 405
},
{
"epoch": 1.47,
"grad_norm": 3.5011582713063167,
"learning_rate": 1.7671609692056946e-05,
"loss": 0.1652,
"step": 406
},
{
"epoch": 1.47,
"grad_norm": 2.2725214247403236,
"learning_rate": 1.765904714437988e-05,
"loss": 0.1891,
"step": 407
},
{
"epoch": 1.48,
"grad_norm": 2.827813302901743,
"learning_rate": 1.7646455288899535e-05,
"loss": 0.1432,
"step": 408
},
{
"epoch": 1.48,
"grad_norm": 1.371825459719137,
"learning_rate": 1.76338341737994e-05,
"loss": 0.1414,
"step": 409
},
{
"epoch": 1.49,
"grad_norm": 1.979991911142439,
"learning_rate": 1.7621183847374935e-05,
"loss": 0.1582,
"step": 410
},
{
"epoch": 1.49,
"grad_norm": 2.6953718527839974,
"learning_rate": 1.7608504358033363e-05,
"loss": 0.1791,
"step": 411
},
{
"epoch": 1.49,
"grad_norm": 2.372842866632226,
"learning_rate": 1.7595795754293514e-05,
"loss": 0.183,
"step": 412
},
{
"epoch": 1.5,
"grad_norm": 2.1462489900704242,
"learning_rate": 1.7583058084785626e-05,
"loss": 0.1525,
"step": 413
},
{
"epoch": 1.5,
"grad_norm": 2.794684903380181,
"learning_rate": 1.7570291398251153e-05,
"loss": 0.1563,
"step": 414
},
{
"epoch": 1.5,
"grad_norm": 1.5536847761281407,
"learning_rate": 1.7557495743542586e-05,
"loss": 0.1498,
"step": 415
},
{
"epoch": 1.51,
"grad_norm": 5.033146747767831,
"learning_rate": 1.7544671169623263e-05,
"loss": 0.2027,
"step": 416
},
{
"epoch": 1.51,
"grad_norm": 4.627482726440702,
"learning_rate": 1.753181772556719e-05,
"loss": 0.2272,
"step": 417
},
{
"epoch": 1.51,
"grad_norm": 1.8280182578141009,
"learning_rate": 1.751893546055884e-05,
"loss": 0.1536,
"step": 418
},
{
"epoch": 1.52,
"grad_norm": 1.8824484299005066,
"learning_rate": 1.750602442389297e-05,
"loss": 0.2159,
"step": 419
},
{
"epoch": 1.52,
"grad_norm": 0.8759220400623319,
"learning_rate": 1.749308466497444e-05,
"loss": 0.1189,
"step": 420
},
{
"epoch": 1.53,
"grad_norm": 1.9833046214559626,
"learning_rate": 1.7480116233318013e-05,
"loss": 0.1366,
"step": 421
},
{
"epoch": 1.53,
"grad_norm": 1.7395056435824792,
"learning_rate": 1.746711917854817e-05,
"loss": 0.1538,
"step": 422
},
{
"epoch": 1.53,
"grad_norm": 3.5305473680481865,
"learning_rate": 1.745409355039892e-05,
"loss": 0.1892,
"step": 423
},
{
"epoch": 1.54,
"grad_norm": 4.258477367145113,
"learning_rate": 1.744103939871361e-05,
"loss": 0.2052,
"step": 424
},
{
"epoch": 1.54,
"grad_norm": 1.390083899989616,
"learning_rate": 1.7427956773444733e-05,
"loss": 0.1416,
"step": 425
},
{
"epoch": 1.54,
"grad_norm": 4.530447990557084,
"learning_rate": 1.7414845724653743e-05,
"loss": 0.1506,
"step": 426
},
{
"epoch": 1.55,
"grad_norm": 1.3749722590696383,
"learning_rate": 1.7401706302510854e-05,
"loss": 0.17,
"step": 427
},
{
"epoch": 1.55,
"grad_norm": 2.9766333202640345,
"learning_rate": 1.738853855729485e-05,
"loss": 0.2007,
"step": 428
},
{
"epoch": 1.55,
"grad_norm": 1.9436893098276,
"learning_rate": 1.7375342539392903e-05,
"loss": 0.147,
"step": 429
},
{
"epoch": 1.56,
"grad_norm": 2.258826675200108,
"learning_rate": 1.7362118299300363e-05,
"loss": 0.1288,
"step": 430
},
{
"epoch": 1.56,
"grad_norm": 2.691431274072751,
"learning_rate": 1.7348865887620574e-05,
"loss": 0.1721,
"step": 431
},
{
"epoch": 1.57,
"grad_norm": 2.446923649161082,
"learning_rate": 1.733558535506469e-05,
"loss": 0.1558,
"step": 432
},
{
"epoch": 1.57,
"grad_norm": 2.341545822778966,
"learning_rate": 1.732227675245147e-05,
"loss": 0.1407,
"step": 433
},
{
"epoch": 1.57,
"grad_norm": 2.3744345065715042,
"learning_rate": 1.730894013070707e-05,
"loss": 0.1684,
"step": 434
},
{
"epoch": 1.58,
"grad_norm": 4.788243952486983,
"learning_rate": 1.7295575540864878e-05,
"loss": 0.1598,
"step": 435
},
{
"epoch": 1.58,
"grad_norm": 2.120925937437422,
"learning_rate": 1.7282183034065296e-05,
"loss": 0.1729,
"step": 436
},
{
"epoch": 1.58,
"grad_norm": 2.993159907132356,
"learning_rate": 1.7268762661555557e-05,
"loss": 0.1589,
"step": 437
},
{
"epoch": 1.59,
"grad_norm": 2.7651575966235176,
"learning_rate": 1.7255314474689524e-05,
"loss": 0.1595,
"step": 438
},
{
"epoch": 1.59,
"grad_norm": 1.7469664941889762,
"learning_rate": 1.7241838524927485e-05,
"loss": 0.14,
"step": 439
},
{
"epoch": 1.59,
"grad_norm": 2.58000014180683,
"learning_rate": 1.7228334863835972e-05,
"loss": 0.2079,
"step": 440
},
{
"epoch": 1.6,
"grad_norm": 2.392841581472421,
"learning_rate": 1.7214803543087556e-05,
"loss": 0.1557,
"step": 441
},
{
"epoch": 1.6,
"grad_norm": 6.692545196051933,
"learning_rate": 1.7201244614460645e-05,
"loss": 0.1946,
"step": 442
},
{
"epoch": 1.61,
"grad_norm": 4.9639824306218445,
"learning_rate": 1.7187658129839295e-05,
"loss": 0.1934,
"step": 443
},
{
"epoch": 1.61,
"grad_norm": 4.600507727479392,
"learning_rate": 1.7174044141213e-05,
"loss": 0.1554,
"step": 444
},
{
"epoch": 1.61,
"grad_norm": 1.831446485269553,
"learning_rate": 1.716040270067651e-05,
"loss": 0.1493,
"step": 445
},
{
"epoch": 1.62,
"grad_norm": 3.6460841437042335,
"learning_rate": 1.7146733860429614e-05,
"loss": 0.2242,
"step": 446
},
{
"epoch": 1.62,
"grad_norm": 2.7349528609804215,
"learning_rate": 1.7133037672776943e-05,
"loss": 0.1657,
"step": 447
},
{
"epoch": 1.62,
"grad_norm": 2.909659149899981,
"learning_rate": 1.7119314190127786e-05,
"loss": 0.1416,
"step": 448
},
{
"epoch": 1.63,
"grad_norm": 3.800824146188335,
"learning_rate": 1.7105563464995873e-05,
"loss": 0.1596,
"step": 449
},
{
"epoch": 1.63,
"grad_norm": 2.6123862898271386,
"learning_rate": 1.7091785549999177e-05,
"loss": 0.1164,
"step": 450
},
{
"epoch": 1.63,
"grad_norm": 2.5702633045133734,
"learning_rate": 1.7077980497859715e-05,
"loss": 0.1657,
"step": 451
},
{
"epoch": 1.64,
"grad_norm": 1.8590199199348927,
"learning_rate": 1.7064148361403347e-05,
"loss": 0.1544,
"step": 452
},
{
"epoch": 1.64,
"grad_norm": 2.2010676694838485,
"learning_rate": 1.7050289193559576e-05,
"loss": 0.1552,
"step": 453
},
{
"epoch": 1.64,
"grad_norm": 3.4735777802822874,
"learning_rate": 1.7036403047361336e-05,
"loss": 0.1845,
"step": 454
},
{
"epoch": 1.65,
"grad_norm": 3.525410489743807,
"learning_rate": 1.70224899759448e-05,
"loss": 0.1757,
"step": 455
},
{
"epoch": 1.65,
"grad_norm": 5.245801333039583,
"learning_rate": 1.7008550032549167e-05,
"loss": 0.1303,
"step": 456
},
{
"epoch": 1.66,
"grad_norm": 2.7102854239542022,
"learning_rate": 1.699458327051647e-05,
"loss": 0.1493,
"step": 457
},
{
"epoch": 1.66,
"grad_norm": 1.7401112413904483,
"learning_rate": 1.6980589743291362e-05,
"loss": 0.145,
"step": 458
},
{
"epoch": 1.66,
"grad_norm": 4.34019844447212,
"learning_rate": 1.6966569504420913e-05,
"loss": 0.1506,
"step": 459
},
{
"epoch": 1.67,
"grad_norm": 5.020753282702713,
"learning_rate": 1.695252260755441e-05,
"loss": 0.1665,
"step": 460
},
{
"epoch": 1.67,
"grad_norm": 5.692611051423799,
"learning_rate": 1.693844910644314e-05,
"loss": 0.202,
"step": 461
},
{
"epoch": 1.67,
"grad_norm": 1.3944083840141086,
"learning_rate": 1.6924349054940204e-05,
"loss": 0.129,
"step": 462
},
{
"epoch": 1.68,
"grad_norm": 2.248559809005285,
"learning_rate": 1.6910222507000295e-05,
"loss": 0.1518,
"step": 463
},
{
"epoch": 1.68,
"grad_norm": 5.043263710574603,
"learning_rate": 1.6896069516679494e-05,
"loss": 0.147,
"step": 464
},
{
"epoch": 1.68,
"grad_norm": 4.744308803957159,
"learning_rate": 1.688189013813507e-05,
"loss": 0.1708,
"step": 465
},
{
"epoch": 1.69,
"grad_norm": 3.7010206909367422,
"learning_rate": 1.6867684425625265e-05,
"loss": 0.1399,
"step": 466
},
{
"epoch": 1.69,
"grad_norm": 4.972435061041302,
"learning_rate": 1.6853452433509087e-05,
"loss": 0.1423,
"step": 467
},
{
"epoch": 1.7,
"grad_norm": 1.827326748489026,
"learning_rate": 1.683919421624611e-05,
"loss": 0.1689,
"step": 468
},
{
"epoch": 1.7,
"grad_norm": 1.8240952506218318,
"learning_rate": 1.6824909828396254e-05,
"loss": 0.1335,
"step": 469
},
{
"epoch": 1.7,
"grad_norm": 3.8849118259684547,
"learning_rate": 1.681059932461959e-05,
"loss": 0.1741,
"step": 470
},
{
"epoch": 1.71,
"grad_norm": 5.192115777745488,
"learning_rate": 1.679626275967612e-05,
"loss": 0.1519,
"step": 471
},
{
"epoch": 1.71,
"grad_norm": 6.675697646151228,
"learning_rate": 1.6781900188425565e-05,
"loss": 0.2234,
"step": 472
},
{
"epoch": 1.71,
"grad_norm": 2.378328321963269,
"learning_rate": 1.6767511665827167e-05,
"loss": 0.1446,
"step": 473
},
{
"epoch": 1.72,
"grad_norm": 2.152827717749817,
"learning_rate": 1.6753097246939475e-05,
"loss": 0.1939,
"step": 474
},
{
"epoch": 1.72,
"grad_norm": 4.530064408209738,
"learning_rate": 1.6738656986920123e-05,
"loss": 0.174,
"step": 475
},
{
"epoch": 1.72,
"grad_norm": 4.147277341203001,
"learning_rate": 1.672419094102563e-05,
"loss": 0.1599,
"step": 476
},
{
"epoch": 1.73,
"grad_norm": 4.06626962419669,
"learning_rate": 1.670969916461119e-05,
"loss": 0.1363,
"step": 477
},
{
"epoch": 1.73,
"grad_norm": 2.2285257210803655,
"learning_rate": 1.6695181713130462e-05,
"loss": 0.1243,
"step": 478
},
{
"epoch": 1.74,
"grad_norm": 2.11800924466845,
"learning_rate": 1.6680638642135337e-05,
"loss": 0.1815,
"step": 479
},
{
"epoch": 1.74,
"grad_norm": 3.5710416042434225,
"learning_rate": 1.6666070007275746e-05,
"loss": 0.1618,
"step": 480
},
{
"epoch": 1.74,
"grad_norm": 3.4925284732723556,
"learning_rate": 1.6651475864299453e-05,
"loss": 0.1431,
"step": 481
},
{
"epoch": 1.75,
"grad_norm": 2.1151296227345755,
"learning_rate": 1.6636856269051813e-05,
"loss": 0.1711,
"step": 482
},
{
"epoch": 1.75,
"grad_norm": 2.211774961679525,
"learning_rate": 1.662221127747559e-05,
"loss": 0.1556,
"step": 483
},
{
"epoch": 1.75,
"grad_norm": 1.5191237192219527,
"learning_rate": 1.6607540945610722e-05,
"loss": 0.133,
"step": 484
},
{
"epoch": 1.76,
"grad_norm": 1.7691955437105935,
"learning_rate": 1.659284532959411e-05,
"loss": 0.171,
"step": 485
},
{
"epoch": 1.76,
"grad_norm": 1.7670255226792704,
"learning_rate": 1.6578124485659414e-05,
"loss": 0.1575,
"step": 486
},
{
"epoch": 1.76,
"grad_norm": 1.8490943163056435,
"learning_rate": 1.6563378470136823e-05,
"loss": 0.1991,
"step": 487
},
{
"epoch": 1.77,
"grad_norm": 4.3648865869808615,
"learning_rate": 1.6548607339452853e-05,
"loss": 0.1308,
"step": 488
},
{
"epoch": 1.77,
"grad_norm": 1.997485749829696,
"learning_rate": 1.653381115013012e-05,
"loss": 0.1599,
"step": 489
},
{
"epoch": 1.78,
"grad_norm": 2.240270800677183,
"learning_rate": 1.6518989958787126e-05,
"loss": 0.1334,
"step": 490
},
{
"epoch": 1.78,
"grad_norm": 2.4535988541945755,
"learning_rate": 1.6504143822138057e-05,
"loss": 0.1354,
"step": 491
},
{
"epoch": 1.78,
"grad_norm": 3.6949356337532597,
"learning_rate": 1.6489272796992536e-05,
"loss": 0.1218,
"step": 492
},
{
"epoch": 1.79,
"grad_norm": 2.6278736186790126,
"learning_rate": 1.6474376940255442e-05,
"loss": 0.1348,
"step": 493
},
{
"epoch": 1.79,
"grad_norm": 6.545343087933527,
"learning_rate": 1.6459456308926662e-05,
"loss": 0.1979,
"step": 494
},
{
"epoch": 1.79,
"grad_norm": 3.8448673196036114,
"learning_rate": 1.644451096010088e-05,
"loss": 0.1453,
"step": 495
},
{
"epoch": 1.8,
"grad_norm": 4.222708589993964,
"learning_rate": 1.642954095096737e-05,
"loss": 0.1607,
"step": 496
},
{
"epoch": 1.8,
"grad_norm": 2.0932849514016136,
"learning_rate": 1.6414546338809783e-05,
"loss": 0.1323,
"step": 497
},
{
"epoch": 1.8,
"grad_norm": 2.090129133667646,
"learning_rate": 1.639952718100589e-05,
"loss": 0.1374,
"step": 498
},
{
"epoch": 1.81,
"grad_norm": 3.9907789165574146,
"learning_rate": 1.63844835350274e-05,
"loss": 0.1673,
"step": 499
},
{
"epoch": 1.81,
"grad_norm": 3.476284562079525,
"learning_rate": 1.636941545843973e-05,
"loss": 0.1558,
"step": 500
},
{
"epoch": 1.82,
"grad_norm": 2.9177845569523475,
"learning_rate": 1.6354323008901774e-05,
"loss": 0.15,
"step": 501
},
{
"epoch": 1.82,
"grad_norm": 1.4575612480313431,
"learning_rate": 1.6339206244165705e-05,
"loss": 0.1188,
"step": 502
},
{
"epoch": 1.82,
"grad_norm": 2.717706895583365,
"learning_rate": 1.632406522207672e-05,
"loss": 0.1334,
"step": 503
},
{
"epoch": 1.83,
"grad_norm": 5.659401345162485,
"learning_rate": 1.630890000057285e-05,
"loss": 0.1779,
"step": 504
},
{
"epoch": 1.83,
"grad_norm": 2.491520253691608,
"learning_rate": 1.6293710637684733e-05,
"loss": 0.1338,
"step": 505
},
{
"epoch": 1.83,
"grad_norm": 3.501568193131719,
"learning_rate": 1.6278497191535364e-05,
"loss": 0.1632,
"step": 506
},
{
"epoch": 1.84,
"grad_norm": 4.368617842420329,
"learning_rate": 1.6263259720339917e-05,
"loss": 0.1522,
"step": 507
},
{
"epoch": 1.84,
"grad_norm": 2.9101425724438186,
"learning_rate": 1.6247998282405486e-05,
"loss": 0.1488,
"step": 508
},
{
"epoch": 1.84,
"grad_norm": 1.808283260496058,
"learning_rate": 1.623271293613088e-05,
"loss": 0.179,
"step": 509
},
{
"epoch": 1.85,
"grad_norm": 3.3428833027807503,
"learning_rate": 1.621740374000639e-05,
"loss": 0.1387,
"step": 510
},
{
"epoch": 1.85,
"grad_norm": 1.7989422431398234,
"learning_rate": 1.6202070752613578e-05,
"loss": 0.1447,
"step": 511
},
{
"epoch": 1.86,
"grad_norm": 5.55243784188933,
"learning_rate": 1.6186714032625036e-05,
"loss": 0.1494,
"step": 512
},
{
"epoch": 1.86,
"grad_norm": 2.5876168671231325,
"learning_rate": 1.6171333638804177e-05,
"loss": 0.1445,
"step": 513
},
{
"epoch": 1.86,
"grad_norm": 2.792462005235641,
"learning_rate": 1.6155929630004995e-05,
"loss": 0.1399,
"step": 514
},
{
"epoch": 1.87,
"grad_norm": 2.6737163218064905,
"learning_rate": 1.6140502065171865e-05,
"loss": 0.1911,
"step": 515
},
{
"epoch": 1.87,
"grad_norm": 3.288839494781435,
"learning_rate": 1.6125051003339277e-05,
"loss": 0.1426,
"step": 516
},
{
"epoch": 1.87,
"grad_norm": 3.488830712267379,
"learning_rate": 1.610957650363165e-05,
"loss": 0.1948,
"step": 517
},
{
"epoch": 1.88,
"grad_norm": 5.190300035091314,
"learning_rate": 1.6094078625263085e-05,
"loss": 0.202,
"step": 518
},
{
"epoch": 1.88,
"grad_norm": 2.8355095526941088,
"learning_rate": 1.6078557427537144e-05,
"loss": 0.1177,
"step": 519
},
{
"epoch": 1.88,
"grad_norm": 2.085477268858648,
"learning_rate": 1.6063012969846624e-05,
"loss": 0.1267,
"step": 520
},
{
"epoch": 1.89,
"grad_norm": 2.4529248520291653,
"learning_rate": 1.604744531167332e-05,
"loss": 0.1693,
"step": 521
},
{
"epoch": 1.89,
"grad_norm": 3.0178268735143807,
"learning_rate": 1.603185451258781e-05,
"loss": 0.1645,
"step": 522
},
{
"epoch": 1.89,
"grad_norm": 1.9260850388201756,
"learning_rate": 1.6016240632249224e-05,
"loss": 0.162,
"step": 523
},
{
"epoch": 1.9,
"grad_norm": 3.1705810235022667,
"learning_rate": 1.6000603730405013e-05,
"loss": 0.1814,
"step": 524
},
{
"epoch": 1.9,
"grad_norm": 2.083015779524835,
"learning_rate": 1.5984943866890718e-05,
"loss": 0.1824,
"step": 525
},
{
"epoch": 1.91,
"grad_norm": 1.8130349177336165,
"learning_rate": 1.5969261101629744e-05,
"loss": 0.1718,
"step": 526
},
{
"epoch": 1.91,
"grad_norm": 3.932867184382188,
"learning_rate": 1.5953555494633136e-05,
"loss": 0.2069,
"step": 527
},
{
"epoch": 1.91,
"grad_norm": 3.4904804321157124,
"learning_rate": 1.593782710599934e-05,
"loss": 0.1849,
"step": 528
},
{
"epoch": 1.92,
"grad_norm": 2.66805945239631,
"learning_rate": 1.5922075995913976e-05,
"loss": 0.2011,
"step": 529
},
{
"epoch": 1.92,
"grad_norm": 1.3140490522354895,
"learning_rate": 1.5906302224649613e-05,
"loss": 0.1611,
"step": 530
},
{
"epoch": 1.92,
"grad_norm": 1.6973615025326412,
"learning_rate": 1.589050585256554e-05,
"loss": 0.1263,
"step": 531
},
{
"epoch": 1.93,
"grad_norm": 2.0370354462908686,
"learning_rate": 1.5874686940107507e-05,
"loss": 0.169,
"step": 532
},
{
"epoch": 1.93,
"grad_norm": 2.288230600809746,
"learning_rate": 1.5858845547807545e-05,
"loss": 0.1618,
"step": 533
},
{
"epoch": 1.93,
"grad_norm": 5.0758124402179,
"learning_rate": 1.5842981736283686e-05,
"loss": 0.1651,
"step": 534
},
{
"epoch": 1.94,
"grad_norm": 4.119404936544872,
"learning_rate": 1.582709556623976e-05,
"loss": 0.1861,
"step": 535
},
{
"epoch": 1.94,
"grad_norm": 1.811183559427656,
"learning_rate": 1.581118709846514e-05,
"loss": 0.1233,
"step": 536
},
{
"epoch": 1.95,
"grad_norm": 2.4729194568089277,
"learning_rate": 1.5795256393834546e-05,
"loss": 0.1612,
"step": 537
},
{
"epoch": 1.95,
"grad_norm": 1.7394691177808947,
"learning_rate": 1.5779303513307765e-05,
"loss": 0.1328,
"step": 538
},
{
"epoch": 1.95,
"grad_norm": 2.053299955833243,
"learning_rate": 1.576332851792945e-05,
"loss": 0.1236,
"step": 539
},
{
"epoch": 1.96,
"grad_norm": 3.210936578739264,
"learning_rate": 1.574733146882889e-05,
"loss": 0.1507,
"step": 540
},
{
"epoch": 1.96,
"grad_norm": 2.5058993835342718,
"learning_rate": 1.5731312427219738e-05,
"loss": 0.1546,
"step": 541
},
{
"epoch": 1.96,
"grad_norm": 2.2199777398424327,
"learning_rate": 1.571527145439983e-05,
"loss": 0.2009,
"step": 542
},
{
"epoch": 1.97,
"grad_norm": 3.310778322843867,
"learning_rate": 1.5699208611750904e-05,
"loss": 0.187,
"step": 543
},
{
"epoch": 1.97,
"grad_norm": 2.424165488653433,
"learning_rate": 1.5683123960738395e-05,
"loss": 0.1595,
"step": 544
},
{
"epoch": 1.97,
"grad_norm": 1.5557389502573584,
"learning_rate": 1.566701756291118e-05,
"loss": 0.1161,
"step": 545
},
{
"epoch": 1.98,
"grad_norm": 2.0891723510913467,
"learning_rate": 1.5650889479901356e-05,
"loss": 0.1316,
"step": 546
},
{
"epoch": 1.98,
"grad_norm": 1.5710257315573026,
"learning_rate": 1.5634739773424007e-05,
"loss": 0.1625,
"step": 547
},
{
"epoch": 1.99,
"grad_norm": 2.588230137333489,
"learning_rate": 1.5618568505276948e-05,
"loss": 0.1624,
"step": 548
},
{
"epoch": 1.99,
"grad_norm": 3.6544371356800887,
"learning_rate": 1.5602375737340507e-05,
"loss": 0.1609,
"step": 549
},
{
"epoch": 1.99,
"grad_norm": 2.423586451954487,
"learning_rate": 1.558616153157728e-05,
"loss": 0.1657,
"step": 550
},
{
"epoch": 2.0,
"grad_norm": 2.5339695017155988,
"learning_rate": 1.5569925950031906e-05,
"loss": 0.1668,
"step": 551
},
{
"epoch": 2.0,
"grad_norm": 2.9781615139327298,
"learning_rate": 1.5553669054830806e-05,
"loss": 0.1493,
"step": 552
},
{
"epoch": 2.0,
"grad_norm": 3.6930188711451026,
"learning_rate": 1.553739090818196e-05,
"loss": 0.1542,
"step": 553
},
{
"epoch": 2.01,
"grad_norm": 2.7797364025493785,
"learning_rate": 1.552109157237468e-05,
"loss": 0.1464,
"step": 554
},
{
"epoch": 2.01,
"grad_norm": 2.696510654578581,
"learning_rate": 1.5504771109779348e-05,
"loss": 0.1525,
"step": 555
},
{
"epoch": 2.01,
"grad_norm": 6.6918895076554685,
"learning_rate": 1.5488429582847194e-05,
"loss": 0.2056,
"step": 556
},
{
"epoch": 2.02,
"grad_norm": 2.019023563423498,
"learning_rate": 1.547206705411005e-05,
"loss": 0.1386,
"step": 557
},
{
"epoch": 2.02,
"grad_norm": 1.9728321730671903,
"learning_rate": 1.5455683586180117e-05,
"loss": 0.1631,
"step": 558
},
{
"epoch": 2.03,
"grad_norm": 3.911889269659483,
"learning_rate": 1.5439279241749714e-05,
"loss": 0.1803,
"step": 559
},
{
"epoch": 2.03,
"grad_norm": 1.8394667273054477,
"learning_rate": 1.542285408359105e-05,
"loss": 0.1675,
"step": 560
},
{
"epoch": 2.03,
"grad_norm": 4.093274128064072,
"learning_rate": 1.5406408174555978e-05,
"loss": 0.156,
"step": 561
},
{
"epoch": 2.04,
"grad_norm": 1.848164435113447,
"learning_rate": 1.5389941577575753e-05,
"loss": 0.1569,
"step": 562
},
{
"epoch": 2.04,
"grad_norm": 1.6564858409846661,
"learning_rate": 1.5373454355660805e-05,
"loss": 0.1415,
"step": 563
},
{
"epoch": 2.04,
"grad_norm": 1.554610631406401,
"learning_rate": 1.5356946571900465e-05,
"loss": 0.1513,
"step": 564
},
{
"epoch": 2.05,
"grad_norm": 2.9867639979699985,
"learning_rate": 1.5340418289462765e-05,
"loss": 0.1582,
"step": 565
},
{
"epoch": 2.05,
"grad_norm": 1.643399853726559,
"learning_rate": 1.5323869571594166e-05,
"loss": 0.1421,
"step": 566
},
{
"epoch": 2.05,
"grad_norm": 2.1911260149017444,
"learning_rate": 1.5307300481619334e-05,
"loss": 0.1806,
"step": 567
},
{
"epoch": 2.06,
"grad_norm": 1.9052229931897104,
"learning_rate": 1.5290711082940883e-05,
"loss": 0.1276,
"step": 568
},
{
"epoch": 2.06,
"grad_norm": 3.178075384690503,
"learning_rate": 1.527410143903914e-05,
"loss": 0.1619,
"step": 569
},
{
"epoch": 2.07,
"grad_norm": 1.3933412605550552,
"learning_rate": 1.5257471613471908e-05,
"loss": 0.1258,
"step": 570
},
{
"epoch": 2.07,
"grad_norm": 3.929781120919883,
"learning_rate": 1.5240821669874204e-05,
"loss": 0.1738,
"step": 571
},
{
"epoch": 2.07,
"grad_norm": 2.368831991887992,
"learning_rate": 1.5224151671958045e-05,
"loss": 0.1855,
"step": 572
},
{
"epoch": 2.08,
"grad_norm": 1.5633002534137816,
"learning_rate": 1.5207461683512174e-05,
"loss": 0.1473,
"step": 573
},
{
"epoch": 2.08,
"grad_norm": 2.0564880232824487,
"learning_rate": 1.5190751768401835e-05,
"loss": 0.1546,
"step": 574
},
{
"epoch": 2.08,
"grad_norm": 3.3813174994385737,
"learning_rate": 1.5174021990568518e-05,
"loss": 0.1723,
"step": 575
},
{
"epoch": 2.09,
"grad_norm": 1.7458502122771002,
"learning_rate": 1.515727241402972e-05,
"loss": 0.146,
"step": 576
},
{
"epoch": 2.09,
"grad_norm": 1.3792766298864636,
"learning_rate": 1.51405031028787e-05,
"loss": 0.1452,
"step": 577
},
{
"epoch": 2.09,
"grad_norm": 5.154008898661378,
"learning_rate": 1.512371412128424e-05,
"loss": 0.189,
"step": 578
},
{
"epoch": 2.1,
"grad_norm": 2.2158356135773447,
"learning_rate": 1.5106905533490373e-05,
"loss": 0.1701,
"step": 579
},
{
"epoch": 2.1,
"grad_norm": 2.2802305847588675,
"learning_rate": 1.509007740381618e-05,
"loss": 0.1287,
"step": 580
},
{
"epoch": 2.11,
"grad_norm": 2.802517989281865,
"learning_rate": 1.5073229796655504e-05,
"loss": 0.1723,
"step": 581
},
{
"epoch": 2.11,
"grad_norm": 5.043367884459983,
"learning_rate": 1.505636277647672e-05,
"loss": 0.132,
"step": 582
},
{
"epoch": 2.11,
"grad_norm": 3.9364570447266467,
"learning_rate": 1.5039476407822501e-05,
"loss": 0.1316,
"step": 583
},
{
"epoch": 2.12,
"grad_norm": 1.790422402968305,
"learning_rate": 1.5022570755309542e-05,
"loss": 0.1438,
"step": 584
},
{
"epoch": 2.12,
"grad_norm": 2.139570124252977,
"learning_rate": 1.5005645883628342e-05,
"loss": 0.1249,
"step": 585
},
{
"epoch": 2.12,
"grad_norm": 1.7297604359039545,
"learning_rate": 1.4988701857542932e-05,
"loss": 0.1557,
"step": 586
},
{
"epoch": 2.13,
"grad_norm": 4.609583051574302,
"learning_rate": 1.497173874189065e-05,
"loss": 0.2113,
"step": 587
},
{
"epoch": 2.13,
"grad_norm": 2.065397185639641,
"learning_rate": 1.495475660158187e-05,
"loss": 0.1789,
"step": 588
},
{
"epoch": 2.13,
"grad_norm": 3.4534182242890794,
"learning_rate": 1.4937755501599774e-05,
"loss": 0.1966,
"step": 589
},
{
"epoch": 2.14,
"grad_norm": 3.7137088675703933,
"learning_rate": 1.492073550700009e-05,
"loss": 0.1558,
"step": 590
},
{
"epoch": 2.14,
"grad_norm": 2.905846213256674,
"learning_rate": 1.4903696682910847e-05,
"loss": 0.1723,
"step": 591
},
{
"epoch": 2.14,
"grad_norm": 2.47784663516246,
"learning_rate": 1.4886639094532129e-05,
"loss": 0.1869,
"step": 592
},
{
"epoch": 2.15,
"grad_norm": 2.0044679459734596,
"learning_rate": 1.486956280713582e-05,
"loss": 0.1655,
"step": 593
},
{
"epoch": 2.15,
"grad_norm": 2.4131117849348476,
"learning_rate": 1.4852467886065357e-05,
"loss": 0.1618,
"step": 594
},
{
"epoch": 2.16,
"grad_norm": 2.671333119059078,
"learning_rate": 1.4835354396735483e-05,
"loss": 0.1863,
"step": 595
},
{
"epoch": 2.16,
"grad_norm": 3.8424507020526653,
"learning_rate": 1.4818222404631993e-05,
"loss": 0.1798,
"step": 596
},
{
"epoch": 2.16,
"grad_norm": 4.951912325631532,
"learning_rate": 1.4801071975311478e-05,
"loss": 0.1444,
"step": 597
},
{
"epoch": 2.17,
"grad_norm": 2.1295985694856765,
"learning_rate": 1.4783903174401086e-05,
"loss": 0.1685,
"step": 598
},
{
"epoch": 2.17,
"grad_norm": 2.874237145559384,
"learning_rate": 1.4766716067598263e-05,
"loss": 0.1352,
"step": 599
},
{
"epoch": 2.17,
"grad_norm": 2.242083664103047,
"learning_rate": 1.4749510720670506e-05,
"loss": 0.1374,
"step": 600
},
{
"epoch": 2.18,
"grad_norm": 2.3038584984777737,
"learning_rate": 1.4732287199455103e-05,
"loss": 0.1463,
"step": 601
},
{
"epoch": 2.18,
"grad_norm": 5.0262258343937045,
"learning_rate": 1.4715045569858895e-05,
"loss": 0.1328,
"step": 602
},
{
"epoch": 2.18,
"grad_norm": 4.043534512425941,
"learning_rate": 1.4697785897858013e-05,
"loss": 0.1754,
"step": 603
},
{
"epoch": 2.19,
"grad_norm": 2.4226711679753263,
"learning_rate": 1.4680508249497622e-05,
"loss": 0.2144,
"step": 604
},
{
"epoch": 2.19,
"grad_norm": 2.6075633120754405,
"learning_rate": 1.4663212690891681e-05,
"loss": 0.149,
"step": 605
},
{
"epoch": 2.2,
"grad_norm": 1.9483058032740208,
"learning_rate": 1.4645899288222686e-05,
"loss": 0.1277,
"step": 606
},
{
"epoch": 2.2,
"grad_norm": 1.8634381599518843,
"learning_rate": 1.4628568107741408e-05,
"loss": 0.1398,
"step": 607
},
{
"epoch": 2.2,
"grad_norm": 2.8575537132659474,
"learning_rate": 1.461121921576665e-05,
"loss": 0.1436,
"step": 608
},
{
"epoch": 2.21,
"grad_norm": 2.9620752891015423,
"learning_rate": 1.4593852678684986e-05,
"loss": 0.1411,
"step": 609
},
{
"epoch": 2.21,
"grad_norm": 1.838103433532202,
"learning_rate": 1.457646856295051e-05,
"loss": 0.1204,
"step": 610
},
{
"epoch": 2.21,
"grad_norm": 3.260278555454537,
"learning_rate": 1.455906693508459e-05,
"loss": 0.1339,
"step": 611
},
{
"epoch": 2.22,
"grad_norm": 1.7172095087010821,
"learning_rate": 1.4541647861675592e-05,
"loss": 0.115,
"step": 612
},
{
"epoch": 2.22,
"grad_norm": 2.991597648856854,
"learning_rate": 1.452421140937865e-05,
"loss": 0.1325,
"step": 613
},
{
"epoch": 2.22,
"grad_norm": 3.5948078337979164,
"learning_rate": 1.4506757644915393e-05,
"loss": 0.1799,
"step": 614
},
{
"epoch": 2.23,
"grad_norm": 3.1725595869845393,
"learning_rate": 1.4489286635073695e-05,
"loss": 0.2037,
"step": 615
},
{
"epoch": 2.23,
"grad_norm": 3.3633370205327395,
"learning_rate": 1.4471798446707426e-05,
"loss": 0.1377,
"step": 616
},
{
"epoch": 2.24,
"grad_norm": 3.6515647608687654,
"learning_rate": 1.4454293146736187e-05,
"loss": 0.1853,
"step": 617
},
{
"epoch": 2.24,
"grad_norm": 2.1163407494968527,
"learning_rate": 1.443677080214506e-05,
"loss": 0.107,
"step": 618
},
{
"epoch": 2.24,
"grad_norm": 2.8246831591572374,
"learning_rate": 1.441923147998434e-05,
"loss": 0.1621,
"step": 619
},
{
"epoch": 2.25,
"grad_norm": 2.9466425861832106,
"learning_rate": 1.4401675247369307e-05,
"loss": 0.1303,
"step": 620
},
{
"epoch": 2.25,
"grad_norm": 3.5393100208262855,
"learning_rate": 1.4384102171479932e-05,
"loss": 0.1164,
"step": 621
},
{
"epoch": 2.25,
"grad_norm": 2.9109710089013174,
"learning_rate": 1.4366512319560642e-05,
"loss": 0.1333,
"step": 622
},
{
"epoch": 2.26,
"grad_norm": 1.9752913219647925,
"learning_rate": 1.434890575892006e-05,
"loss": 0.1439,
"step": 623
},
{
"epoch": 2.26,
"grad_norm": 3.230634064111403,
"learning_rate": 1.4331282556930753e-05,
"loss": 0.1642,
"step": 624
},
{
"epoch": 2.26,
"grad_norm": 2.246924034268435,
"learning_rate": 1.4313642781028953e-05,
"loss": 0.1201,
"step": 625
},
{
"epoch": 2.27,
"grad_norm": 2.6322864446435443,
"learning_rate": 1.4295986498714326e-05,
"loss": 0.1001,
"step": 626
},
{
"epoch": 2.27,
"grad_norm": 2.8890631930316624,
"learning_rate": 1.427831377754969e-05,
"loss": 0.1478,
"step": 627
},
{
"epoch": 2.28,
"grad_norm": 2.83222544176522,
"learning_rate": 1.4260624685160778e-05,
"loss": 0.1113,
"step": 628
},
{
"epoch": 2.28,
"grad_norm": 3.281562685100209,
"learning_rate": 1.4242919289235958e-05,
"loss": 0.1365,
"step": 629
},
{
"epoch": 2.28,
"grad_norm": 2.5058391781451417,
"learning_rate": 1.4225197657525996e-05,
"loss": 0.1388,
"step": 630
},
{
"epoch": 2.29,
"grad_norm": 3.198653024717054,
"learning_rate": 1.4207459857843771e-05,
"loss": 0.0818,
"step": 631
},
{
"epoch": 2.29,
"grad_norm": 4.026932943074049,
"learning_rate": 1.4189705958064041e-05,
"loss": 0.1551,
"step": 632
},
{
"epoch": 2.29,
"grad_norm": 2.5337607362274954,
"learning_rate": 1.417193602612317e-05,
"loss": 0.1136,
"step": 633
},
{
"epoch": 2.3,
"grad_norm": 4.125149342194021,
"learning_rate": 1.4154150130018867e-05,
"loss": 0.1267,
"step": 634
},
{
"epoch": 2.3,
"grad_norm": 2.6807972057361718,
"learning_rate": 1.413634833780993e-05,
"loss": 0.1162,
"step": 635
},
{
"epoch": 2.3,
"grad_norm": 6.5015463609549915,
"learning_rate": 1.4118530717615982e-05,
"loss": 0.1712,
"step": 636
},
{
"epoch": 2.31,
"grad_norm": 4.433843930215299,
"learning_rate": 1.4100697337617218e-05,
"loss": 0.1483,
"step": 637
},
{
"epoch": 2.31,
"grad_norm": 3.023664438844803,
"learning_rate": 1.4082848266054136e-05,
"loss": 0.144,
"step": 638
},
{
"epoch": 2.32,
"grad_norm": 3.8997338181314816,
"learning_rate": 1.4064983571227282e-05,
"loss": 0.1578,
"step": 639
},
{
"epoch": 2.32,
"grad_norm": 5.196782343567986,
"learning_rate": 1.4047103321496977e-05,
"loss": 0.2304,
"step": 640
},
{
"epoch": 2.32,
"grad_norm": 6.966578918531625,
"learning_rate": 1.4029207585283072e-05,
"loss": 0.1701,
"step": 641
},
{
"epoch": 2.33,
"grad_norm": 5.437415435237373,
"learning_rate": 1.4011296431064675e-05,
"loss": 0.1554,
"step": 642
},
{
"epoch": 2.33,
"grad_norm": 3.5287376283141723,
"learning_rate": 1.3993369927379891e-05,
"loss": 0.14,
"step": 643
},
{
"epoch": 2.33,
"grad_norm": 2.634767923381933,
"learning_rate": 1.3975428142825562e-05,
"loss": 0.1685,
"step": 644
},
{
"epoch": 2.34,
"grad_norm": 3.8673725956552065,
"learning_rate": 1.3957471146057e-05,
"loss": 0.1594,
"step": 645
},
{
"epoch": 2.34,
"grad_norm": 2.0633317372185687,
"learning_rate": 1.3939499005787735e-05,
"loss": 0.1252,
"step": 646
},
{
"epoch": 2.34,
"grad_norm": 4.104068349515131,
"learning_rate": 1.3921511790789235e-05,
"loss": 0.1892,
"step": 647
},
{
"epoch": 2.35,
"grad_norm": 1.4003136635922295,
"learning_rate": 1.3903509569890663e-05,
"loss": 0.1454,
"step": 648
},
{
"epoch": 2.35,
"grad_norm": 3.0059666624586203,
"learning_rate": 1.388549241197859e-05,
"loss": 0.1944,
"step": 649
},
{
"epoch": 2.36,
"grad_norm": 1.7332874811258845,
"learning_rate": 1.3867460385996756e-05,
"loss": 0.1556,
"step": 650
},
{
"epoch": 2.36,
"grad_norm": 2.4249766244944784,
"learning_rate": 1.3849413560945788e-05,
"loss": 0.1684,
"step": 651
},
{
"epoch": 2.36,
"grad_norm": 2.7624132464699303,
"learning_rate": 1.3831352005882947e-05,
"loss": 0.1691,
"step": 652
},
{
"epoch": 2.37,
"grad_norm": 2.6462363499749473,
"learning_rate": 1.3813275789921856e-05,
"loss": 0.1531,
"step": 653
},
{
"epoch": 2.37,
"grad_norm": 1.9098892115932387,
"learning_rate": 1.3795184982232234e-05,
"loss": 0.1559,
"step": 654
},
{
"epoch": 2.37,
"grad_norm": 4.737845980438906,
"learning_rate": 1.3777079652039649e-05,
"loss": 0.1493,
"step": 655
},
{
"epoch": 2.38,
"grad_norm": 2.96373225232142,
"learning_rate": 1.3758959868625233e-05,
"loss": 0.1547,
"step": 656
},
{
"epoch": 2.38,
"grad_norm": 1.6573947051585216,
"learning_rate": 1.3740825701325417e-05,
"loss": 0.1407,
"step": 657
},
{
"epoch": 2.38,
"grad_norm": 1.2430731743842967,
"learning_rate": 1.3722677219531684e-05,
"loss": 0.1199,
"step": 658
},
{
"epoch": 2.39,
"grad_norm": 1.9306302053944655,
"learning_rate": 1.370451449269029e-05,
"loss": 0.1629,
"step": 659
},
{
"epoch": 2.39,
"grad_norm": 2.166845655575119,
"learning_rate": 1.3686337590301997e-05,
"loss": 0.1672,
"step": 660
},
{
"epoch": 2.39,
"grad_norm": 2.7289112859521425,
"learning_rate": 1.366814658192181e-05,
"loss": 0.1494,
"step": 661
},
{
"epoch": 2.4,
"grad_norm": 1.5117108734297187,
"learning_rate": 1.364994153715872e-05,
"loss": 0.1134,
"step": 662
},
{
"epoch": 2.4,
"grad_norm": 2.016864300473532,
"learning_rate": 1.3631722525675412e-05,
"loss": 0.1292,
"step": 663
},
{
"epoch": 2.41,
"grad_norm": 1.9808047084535987,
"learning_rate": 1.361348961718804e-05,
"loss": 0.1404,
"step": 664
},
{
"epoch": 2.41,
"grad_norm": 2.2189165012578695,
"learning_rate": 1.359524288146591e-05,
"loss": 0.1594,
"step": 665
},
{
"epoch": 2.41,
"grad_norm": 3.587067010599109,
"learning_rate": 1.3576982388331258e-05,
"loss": 0.1048,
"step": 666
},
{
"epoch": 2.42,
"grad_norm": 1.872852383025508,
"learning_rate": 1.3558708207658948e-05,
"loss": 0.1292,
"step": 667
},
{
"epoch": 2.42,
"grad_norm": 1.1099352315229647,
"learning_rate": 1.3540420409376237e-05,
"loss": 0.0937,
"step": 668
},
{
"epoch": 2.42,
"grad_norm": 2.072876069571932,
"learning_rate": 1.3522119063462482e-05,
"loss": 0.1318,
"step": 669
},
{
"epoch": 2.43,
"grad_norm": 2.680823510834941,
"learning_rate": 1.3503804239948874e-05,
"loss": 0.1397,
"step": 670
},
{
"epoch": 2.43,
"grad_norm": 2.8436852709115086,
"learning_rate": 1.3485476008918186e-05,
"loss": 0.1376,
"step": 671
},
{
"epoch": 2.43,
"grad_norm": 2.483810788507097,
"learning_rate": 1.3467134440504497e-05,
"loss": 0.2099,
"step": 672
},
{
"epoch": 2.44,
"grad_norm": 2.024202118173722,
"learning_rate": 1.3448779604892916e-05,
"loss": 0.1631,
"step": 673
},
{
"epoch": 2.44,
"grad_norm": 3.3874042816937453,
"learning_rate": 1.3430411572319323e-05,
"loss": 0.1549,
"step": 674
},
{
"epoch": 2.45,
"grad_norm": 2.0445973830792226,
"learning_rate": 1.3412030413070096e-05,
"loss": 0.1459,
"step": 675
},
{
"epoch": 2.45,
"grad_norm": 2.2567358837105926,
"learning_rate": 1.3393636197481842e-05,
"loss": 0.1509,
"step": 676
},
{
"epoch": 2.45,
"grad_norm": 2.2499068792437322,
"learning_rate": 1.3375228995941135e-05,
"loss": 0.1435,
"step": 677
},
{
"epoch": 2.46,
"grad_norm": 2.6864374390893127,
"learning_rate": 1.335680887888423e-05,
"loss": 0.1579,
"step": 678
},
{
"epoch": 2.46,
"grad_norm": 2.11068958107654,
"learning_rate": 1.3338375916796813e-05,
"loss": 0.1688,
"step": 679
},
{
"epoch": 2.46,
"grad_norm": 3.2643755262296548,
"learning_rate": 1.3319930180213713e-05,
"loss": 0.1757,
"step": 680
},
{
"epoch": 2.47,
"grad_norm": 2.1849128315834347,
"learning_rate": 1.3301471739718659e-05,
"loss": 0.0955,
"step": 681
},
{
"epoch": 2.47,
"grad_norm": 1.395893452706116,
"learning_rate": 1.3283000665943972e-05,
"loss": 0.1269,
"step": 682
},
{
"epoch": 2.47,
"grad_norm": 3.8435765299606652,
"learning_rate": 1.3264517029570325e-05,
"loss": 0.1759,
"step": 683
},
{
"epoch": 2.48,
"grad_norm": 1.344144583853651,
"learning_rate": 1.3246020901326465e-05,
"loss": 0.1203,
"step": 684
},
{
"epoch": 2.48,
"grad_norm": 3.18453194642565,
"learning_rate": 1.3227512351988927e-05,
"loss": 0.1507,
"step": 685
},
{
"epoch": 2.49,
"grad_norm": 2.273708225796424,
"learning_rate": 1.3208991452381798e-05,
"loss": 0.1451,
"step": 686
},
{
"epoch": 2.49,
"grad_norm": 2.4991174581863214,
"learning_rate": 1.3190458273376403e-05,
"loss": 0.1629,
"step": 687
},
{
"epoch": 2.49,
"grad_norm": 2.2252484353713458,
"learning_rate": 1.3171912885891063e-05,
"loss": 0.1625,
"step": 688
},
{
"epoch": 2.5,
"grad_norm": 1.736884623960957,
"learning_rate": 1.3153355360890815e-05,
"loss": 0.1282,
"step": 689
},
{
"epoch": 2.5,
"grad_norm": 2.15785290638828,
"learning_rate": 1.3134785769387147e-05,
"loss": 0.1391,
"step": 690
},
{
"epoch": 2.5,
"grad_norm": 2.9811024522964154,
"learning_rate": 1.3116204182437711e-05,
"loss": 0.142,
"step": 691
},
{
"epoch": 2.51,
"grad_norm": 3.662387340221045,
"learning_rate": 1.3097610671146063e-05,
"loss": 0.1779,
"step": 692
},
{
"epoch": 2.51,
"grad_norm": 3.3860466495572146,
"learning_rate": 1.307900530666139e-05,
"loss": 0.1509,
"step": 693
},
{
"epoch": 2.51,
"grad_norm": 2.28060851336785,
"learning_rate": 1.3060388160178237e-05,
"loss": 0.149,
"step": 694
},
{
"epoch": 2.52,
"grad_norm": 1.8513128405544685,
"learning_rate": 1.304175930293623e-05,
"loss": 0.1709,
"step": 695
},
{
"epoch": 2.52,
"grad_norm": 1.4462263633502537,
"learning_rate": 1.302311880621981e-05,
"loss": 0.1194,
"step": 696
},
{
"epoch": 2.53,
"grad_norm": 1.9548570447274782,
"learning_rate": 1.3004466741357953e-05,
"loss": 0.1237,
"step": 697
},
{
"epoch": 2.53,
"grad_norm": 1.4806136831831311,
"learning_rate": 1.2985803179723903e-05,
"loss": 0.1269,
"step": 698
},
{
"epoch": 2.53,
"grad_norm": 2.9715702991593127,
"learning_rate": 1.2967128192734903e-05,
"loss": 0.1349,
"step": 699
},
{
"epoch": 2.54,
"grad_norm": 3.466274211369672,
"learning_rate": 1.294844185185191e-05,
"loss": 0.1688,
"step": 700
},
{
"epoch": 2.54,
"grad_norm": 1.7553058535020245,
"learning_rate": 1.2929744228579325e-05,
"loss": 0.1097,
"step": 701
},
{
"epoch": 2.54,
"grad_norm": 3.0216851671552094,
"learning_rate": 1.2911035394464724e-05,
"loss": 0.1119,
"step": 702
},
{
"epoch": 2.55,
"grad_norm": 2.0733992908118593,
"learning_rate": 1.2892315421098587e-05,
"loss": 0.1498,
"step": 703
},
{
"epoch": 2.55,
"grad_norm": 1.9082569188886227,
"learning_rate": 1.2873584380114012e-05,
"loss": 0.1466,
"step": 704
},
{
"epoch": 2.55,
"grad_norm": 2.1693782252874465,
"learning_rate": 1.2854842343186455e-05,
"loss": 0.1188,
"step": 705
},
{
"epoch": 2.56,
"grad_norm": 1.8457394355525738,
"learning_rate": 1.283608938203344e-05,
"loss": 0.0983,
"step": 706
},
{
"epoch": 2.56,
"grad_norm": 3.9391659305171975,
"learning_rate": 1.2817325568414299e-05,
"loss": 0.1319,
"step": 707
},
{
"epoch": 2.57,
"grad_norm": 3.9424785468417647,
"learning_rate": 1.2798550974129888e-05,
"loss": 0.1374,
"step": 708
},
{
"epoch": 2.57,
"grad_norm": 2.620705577547139,
"learning_rate": 1.2779765671022327e-05,
"loss": 0.1311,
"step": 709
},
{
"epoch": 2.57,
"grad_norm": 2.7842863143216023,
"learning_rate": 1.2760969730974692e-05,
"loss": 0.1369,
"step": 710
},
{
"epoch": 2.58,
"grad_norm": 3.4504806028599027,
"learning_rate": 1.274216322591078e-05,
"loss": 0.1343,
"step": 711
},
{
"epoch": 2.58,
"grad_norm": 2.548126380604583,
"learning_rate": 1.2723346227794817e-05,
"loss": 0.1321,
"step": 712
},
{
"epoch": 2.58,
"grad_norm": 4.370878901486762,
"learning_rate": 1.2704518808631168e-05,
"loss": 0.1326,
"step": 713
},
{
"epoch": 2.59,
"grad_norm": 3.774946256872503,
"learning_rate": 1.2685681040464081e-05,
"loss": 0.1352,
"step": 714
},
{
"epoch": 2.59,
"grad_norm": 3.1517657140996853,
"learning_rate": 1.2666832995377412e-05,
"loss": 0.1001,
"step": 715
},
{
"epoch": 2.59,
"grad_norm": 3.0860083371990448,
"learning_rate": 1.264797474549433e-05,
"loss": 0.1631,
"step": 716
},
{
"epoch": 2.6,
"grad_norm": 1.9332984003257843,
"learning_rate": 1.2629106362977064e-05,
"loss": 0.1014,
"step": 717
},
{
"epoch": 2.6,
"grad_norm": 3.1029076169572742,
"learning_rate": 1.2610227920026608e-05,
"loss": 0.1436,
"step": 718
},
{
"epoch": 2.61,
"grad_norm": 3.7906759875527136,
"learning_rate": 1.2591339488882458e-05,
"loss": 0.1615,
"step": 719
},
{
"epoch": 2.61,
"grad_norm": 4.934283918080738,
"learning_rate": 1.2572441141822322e-05,
"loss": 0.136,
"step": 720
},
{
"epoch": 2.61,
"grad_norm": 5.096116343618296,
"learning_rate": 1.255353295116187e-05,
"loss": 0.1435,
"step": 721
},
{
"epoch": 2.62,
"grad_norm": 4.146936550181582,
"learning_rate": 1.2534614989254423e-05,
"loss": 0.2009,
"step": 722
},
{
"epoch": 2.62,
"grad_norm": 4.085178260856516,
"learning_rate": 1.2515687328490696e-05,
"loss": 0.1425,
"step": 723
},
{
"epoch": 2.62,
"grad_norm": 2.4774821694630833,
"learning_rate": 1.2496750041298515e-05,
"loss": 0.1202,
"step": 724
},
{
"epoch": 2.63,
"grad_norm": 4.005381342341569,
"learning_rate": 1.2477803200142551e-05,
"loss": 0.1252,
"step": 725
},
{
"epoch": 2.63,
"grad_norm": 3.117326777003886,
"learning_rate": 1.2458846877524025e-05,
"loss": 0.1052,
"step": 726
},
{
"epoch": 2.63,
"grad_norm": 5.011961221758716,
"learning_rate": 1.2439881145980444e-05,
"loss": 0.1592,
"step": 727
},
{
"epoch": 2.64,
"grad_norm": 6.402429747161904,
"learning_rate": 1.2420906078085316e-05,
"loss": 0.1506,
"step": 728
},
{
"epoch": 2.64,
"grad_norm": 5.911505976531602,
"learning_rate": 1.2401921746447874e-05,
"loss": 0.1356,
"step": 729
},
{
"epoch": 2.64,
"grad_norm": 4.085941837226761,
"learning_rate": 1.2382928223712807e-05,
"loss": 0.1561,
"step": 730
},
{
"epoch": 2.65,
"grad_norm": 1.8930462870586915,
"learning_rate": 1.2363925582559968e-05,
"loss": 0.1513,
"step": 731
},
{
"epoch": 2.65,
"grad_norm": 5.765133151777372,
"learning_rate": 1.2344913895704099e-05,
"loss": 0.149,
"step": 732
},
{
"epoch": 2.66,
"grad_norm": 5.964131756390052,
"learning_rate": 1.2325893235894563e-05,
"loss": 0.1371,
"step": 733
},
{
"epoch": 2.66,
"grad_norm": 5.656003687233519,
"learning_rate": 1.2306863675915058e-05,
"loss": 0.1365,
"step": 734
},
{
"epoch": 2.66,
"grad_norm": 3.126958801018975,
"learning_rate": 1.228782528858333e-05,
"loss": 0.1202,
"step": 735
},
{
"epoch": 2.67,
"grad_norm": 2.4984355268123632,
"learning_rate": 1.2268778146750914e-05,
"loss": 0.1414,
"step": 736
},
{
"epoch": 2.67,
"grad_norm": 3.6962151946346475,
"learning_rate": 1.2249722323302841e-05,
"loss": 0.1453,
"step": 737
},
{
"epoch": 2.67,
"grad_norm": 3.313461638240804,
"learning_rate": 1.2230657891157365e-05,
"loss": 0.1123,
"step": 738
},
{
"epoch": 2.68,
"grad_norm": 4.601078169226705,
"learning_rate": 1.2211584923265673e-05,
"loss": 0.1497,
"step": 739
},
{
"epoch": 2.68,
"grad_norm": 3.7073417285450403,
"learning_rate": 1.2192503492611625e-05,
"loss": 0.1199,
"step": 740
},
{
"epoch": 2.68,
"grad_norm": 3.1824208325942913,
"learning_rate": 1.2173413672211457e-05,
"loss": 0.1697,
"step": 741
},
{
"epoch": 2.69,
"grad_norm": 2.471535241485352,
"learning_rate": 1.2154315535113513e-05,
"loss": 0.1334,
"step": 742
},
{
"epoch": 2.69,
"grad_norm": 3.4155726172820446,
"learning_rate": 1.2135209154397962e-05,
"loss": 0.1278,
"step": 743
},
{
"epoch": 2.7,
"grad_norm": 3.010032705672301,
"learning_rate": 1.2116094603176513e-05,
"loss": 0.1412,
"step": 744
},
{
"epoch": 2.7,
"grad_norm": 3.0906837228871273,
"learning_rate": 1.2096971954592146e-05,
"loss": 0.1441,
"step": 745
},
{
"epoch": 2.7,
"grad_norm": 2.9486512312912088,
"learning_rate": 1.2077841281818816e-05,
"loss": 0.1516,
"step": 746
},
{
"epoch": 2.71,
"grad_norm": 2.266804011074804,
"learning_rate": 1.2058702658061197e-05,
"loss": 0.107,
"step": 747
},
{
"epoch": 2.71,
"grad_norm": 2.5883749931630056,
"learning_rate": 1.203955615655438e-05,
"loss": 0.1841,
"step": 748
},
{
"epoch": 2.71,
"grad_norm": 2.4797481006413724,
"learning_rate": 1.2020401850563596e-05,
"loss": 0.1044,
"step": 749
},
{
"epoch": 2.72,
"grad_norm": 2.4908442077053508,
"learning_rate": 1.2001239813383951e-05,
"loss": 0.1816,
"step": 750
},
{
"epoch": 2.72,
"grad_norm": 1.5916571109906084,
"learning_rate": 1.1982070118340128e-05,
"loss": 0.1458,
"step": 751
},
{
"epoch": 2.72,
"grad_norm": 2.8372465633779353,
"learning_rate": 1.1962892838786116e-05,
"loss": 0.1146,
"step": 752
},
{
"epoch": 2.73,
"grad_norm": 2.6671483653198065,
"learning_rate": 1.1943708048104923e-05,
"loss": 0.1137,
"step": 753
},
{
"epoch": 2.73,
"grad_norm": 4.055069878690088,
"learning_rate": 1.19245158197083e-05,
"loss": 0.1104,
"step": 754
},
{
"epoch": 2.74,
"grad_norm": 2.486327947054191,
"learning_rate": 1.1905316227036465e-05,
"loss": 0.1429,
"step": 755
},
{
"epoch": 2.74,
"grad_norm": 3.2617966124439186,
"learning_rate": 1.1886109343557808e-05,
"loss": 0.1379,
"step": 756
},
{
"epoch": 2.74,
"grad_norm": 1.9313464650423409,
"learning_rate": 1.186689524276862e-05,
"loss": 0.1198,
"step": 757
},
{
"epoch": 2.75,
"grad_norm": 3.729544641553348,
"learning_rate": 1.1847673998192815e-05,
"loss": 0.1379,
"step": 758
},
{
"epoch": 2.75,
"grad_norm": 3.7821147579366157,
"learning_rate": 1.1828445683381628e-05,
"loss": 0.1413,
"step": 759
},
{
"epoch": 2.75,
"grad_norm": 3.2212472895092787,
"learning_rate": 1.180921037191337e-05,
"loss": 0.1128,
"step": 760
},
{
"epoch": 2.76,
"grad_norm": 3.8905217179467093,
"learning_rate": 1.1789968137393108e-05,
"loss": 0.1412,
"step": 761
},
{
"epoch": 2.76,
"grad_norm": 2.84781205596861,
"learning_rate": 1.1770719053452408e-05,
"loss": 0.135,
"step": 762
},
{
"epoch": 2.76,
"grad_norm": 2.4542496505430895,
"learning_rate": 1.1751463193749044e-05,
"loss": 0.1683,
"step": 763
},
{
"epoch": 2.77,
"grad_norm": 2.879779310063717,
"learning_rate": 1.1732200631966717e-05,
"loss": 0.1012,
"step": 764
},
{
"epoch": 2.77,
"grad_norm": 2.879306798231344,
"learning_rate": 1.1712931441814776e-05,
"loss": 0.1282,
"step": 765
},
{
"epoch": 2.78,
"grad_norm": 2.836413783186633,
"learning_rate": 1.1693655697027935e-05,
"loss": 0.09,
"step": 766
},
{
"epoch": 2.78,
"grad_norm": 3.436793644315394,
"learning_rate": 1.1674373471365987e-05,
"loss": 0.0996,
"step": 767
},
{
"epoch": 2.78,
"grad_norm": 1.9949362166058173,
"learning_rate": 1.165508483861352e-05,
"loss": 0.0854,
"step": 768
},
{
"epoch": 2.79,
"grad_norm": 5.489936954970715,
"learning_rate": 1.1635789872579649e-05,
"loss": 0.1484,
"step": 769
},
{
"epoch": 2.79,
"grad_norm": 6.2559843796412595,
"learning_rate": 1.1616488647097718e-05,
"loss": 0.1283,
"step": 770
},
{
"epoch": 2.79,
"grad_norm": 3.2854828105346408,
"learning_rate": 1.1597181236025024e-05,
"loss": 0.1118,
"step": 771
},
{
"epoch": 2.8,
"grad_norm": 2.1551228922320806,
"learning_rate": 1.1577867713242532e-05,
"loss": 0.1048,
"step": 772
},
{
"epoch": 2.8,
"grad_norm": 3.1898874907443555,
"learning_rate": 1.1558548152654596e-05,
"loss": 0.0907,
"step": 773
},
{
"epoch": 2.8,
"grad_norm": 2.5624001707919226,
"learning_rate": 1.1539222628188675e-05,
"loss": 0.1064,
"step": 774
},
{
"epoch": 2.81,
"grad_norm": 3.449986468785642,
"learning_rate": 1.151989121379505e-05,
"loss": 0.1341,
"step": 775
},
{
"epoch": 2.81,
"grad_norm": 4.025583340892815,
"learning_rate": 1.1500553983446527e-05,
"loss": 0.1442,
"step": 776
},
{
"epoch": 2.82,
"grad_norm": 4.1909284054957805,
"learning_rate": 1.148121101113819e-05,
"loss": 0.1241,
"step": 777
},
{
"epoch": 2.82,
"grad_norm": 3.8649243191633613,
"learning_rate": 1.1461862370887076e-05,
"loss": 0.141,
"step": 778
},
{
"epoch": 2.82,
"grad_norm": 6.6164700789226325,
"learning_rate": 1.1442508136731919e-05,
"loss": 0.1329,
"step": 779
},
{
"epoch": 2.83,
"grad_norm": 2.266029845209213,
"learning_rate": 1.1423148382732854e-05,
"loss": 0.1081,
"step": 780
},
{
"epoch": 2.83,
"grad_norm": 3.3044700369516824,
"learning_rate": 1.1403783182971144e-05,
"loss": 0.0904,
"step": 781
},
{
"epoch": 2.83,
"grad_norm": 2.88988382240665,
"learning_rate": 1.1384412611548887e-05,
"loss": 0.1107,
"step": 782
},
{
"epoch": 2.84,
"grad_norm": 5.240950603595819,
"learning_rate": 1.136503674258874e-05,
"loss": 0.1299,
"step": 783
},
{
"epoch": 2.84,
"grad_norm": 5.643337451467647,
"learning_rate": 1.134565565023362e-05,
"loss": 0.1525,
"step": 784
},
{
"epoch": 2.84,
"grad_norm": 5.3873764814070455,
"learning_rate": 1.1326269408646445e-05,
"loss": 0.1751,
"step": 785
},
{
"epoch": 2.85,
"grad_norm": 1.968627074400663,
"learning_rate": 1.1306878092009828e-05,
"loss": 0.0931,
"step": 786
},
{
"epoch": 2.85,
"grad_norm": 6.546444780384288,
"learning_rate": 1.128748177452581e-05,
"loss": 0.1543,
"step": 787
},
{
"epoch": 2.86,
"grad_norm": 3.173360357653791,
"learning_rate": 1.1268080530415557e-05,
"loss": 0.1093,
"step": 788
},
{
"epoch": 2.86,
"grad_norm": 1.7129676786561316,
"learning_rate": 1.1248674433919097e-05,
"loss": 0.1051,
"step": 789
},
{
"epoch": 2.86,
"grad_norm": 5.005043159410687,
"learning_rate": 1.122926355929502e-05,
"loss": 0.1245,
"step": 790
},
{
"epoch": 2.87,
"grad_norm": 3.616282965265042,
"learning_rate": 1.1209847980820208e-05,
"loss": 0.1571,
"step": 791
},
{
"epoch": 2.87,
"grad_norm": 6.867872868581158,
"learning_rate": 1.119042777278953e-05,
"loss": 0.1368,
"step": 792
},
{
"epoch": 2.87,
"grad_norm": 3.9446600508604286,
"learning_rate": 1.1171003009515577e-05,
"loss": 0.1509,
"step": 793
},
{
"epoch": 2.88,
"grad_norm": 3.0200121905654864,
"learning_rate": 1.1151573765328374e-05,
"loss": 0.1545,
"step": 794
},
{
"epoch": 2.88,
"grad_norm": 2.588590429607821,
"learning_rate": 1.1132140114575085e-05,
"loss": 0.0959,
"step": 795
},
{
"epoch": 2.88,
"grad_norm": 3.31566385103503,
"learning_rate": 1.1112702131619747e-05,
"loss": 0.1096,
"step": 796
},
{
"epoch": 2.89,
"grad_norm": 2.8316523924308608,
"learning_rate": 1.1093259890842962e-05,
"loss": 0.153,
"step": 797
},
{
"epoch": 2.89,
"grad_norm": 2.98969306150191,
"learning_rate": 1.1073813466641633e-05,
"loss": 0.1479,
"step": 798
},
{
"epoch": 2.89,
"grad_norm": 3.7372446047393515,
"learning_rate": 1.1054362933428666e-05,
"loss": 0.1468,
"step": 799
},
{
"epoch": 2.9,
"grad_norm": 6.5695166197444905,
"learning_rate": 1.1034908365632695e-05,
"loss": 0.2094,
"step": 800
},
{
"epoch": 2.9,
"grad_norm": 6.383190307410641,
"learning_rate": 1.101544983769779e-05,
"loss": 0.1734,
"step": 801
},
{
"epoch": 2.91,
"grad_norm": 4.9063394108919445,
"learning_rate": 1.0995987424083178e-05,
"loss": 0.1627,
"step": 802
},
{
"epoch": 2.91,
"grad_norm": 2.657197763917094,
"learning_rate": 1.0976521199262945e-05,
"loss": 0.1815,
"step": 803
},
{
"epoch": 2.91,
"grad_norm": 2.2684003765940095,
"learning_rate": 1.0957051237725775e-05,
"loss": 0.1371,
"step": 804
},
{
"epoch": 2.92,
"grad_norm": 3.2154914226854294,
"learning_rate": 1.0937577613974641e-05,
"loss": 0.1858,
"step": 805
},
{
"epoch": 2.92,
"grad_norm": 1.7328927887784134,
"learning_rate": 1.0918100402526533e-05,
"loss": 0.1304,
"step": 806
},
{
"epoch": 2.92,
"grad_norm": 3.5938542193737084,
"learning_rate": 1.0898619677912165e-05,
"loss": 0.13,
"step": 807
},
{
"epoch": 2.93,
"grad_norm": 3.4966470407927943,
"learning_rate": 1.0879135514675706e-05,
"loss": 0.1625,
"step": 808
},
{
"epoch": 2.93,
"grad_norm": 3.7617991298221796,
"learning_rate": 1.0859647987374467e-05,
"loss": 0.1358,
"step": 809
},
{
"epoch": 2.93,
"grad_norm": 1.741516706211674,
"learning_rate": 1.0840157170578645e-05,
"loss": 0.1189,
"step": 810
},
{
"epoch": 2.94,
"grad_norm": 2.5475441861052572,
"learning_rate": 1.0820663138871018e-05,
"loss": 0.1573,
"step": 811
},
{
"epoch": 2.94,
"grad_norm": 2.4128398950709427,
"learning_rate": 1.0801165966846662e-05,
"loss": 0.1067,
"step": 812
},
{
"epoch": 2.95,
"grad_norm": 2.5320429584826663,
"learning_rate": 1.0781665729112688e-05,
"loss": 0.1459,
"step": 813
},
{
"epoch": 2.95,
"grad_norm": 1.493873281090723,
"learning_rate": 1.0762162500287916e-05,
"loss": 0.1191,
"step": 814
},
{
"epoch": 2.95,
"grad_norm": 3.952044884083061,
"learning_rate": 1.0742656355002622e-05,
"loss": 0.1048,
"step": 815
},
{
"epoch": 2.96,
"grad_norm": 2.5201057122988098,
"learning_rate": 1.0723147367898243e-05,
"loss": 0.1158,
"step": 816
},
{
"epoch": 2.96,
"grad_norm": 2.090876232653966,
"learning_rate": 1.0703635613627084e-05,
"loss": 0.1115,
"step": 817
},
{
"epoch": 2.96,
"grad_norm": 3.7294381501440776,
"learning_rate": 1.068412116685205e-05,
"loss": 0.1681,
"step": 818
},
{
"epoch": 2.97,
"grad_norm": 3.844594828018935,
"learning_rate": 1.0664604102246337e-05,
"loss": 0.1511,
"step": 819
},
{
"epoch": 2.97,
"grad_norm": 2.043390310904499,
"learning_rate": 1.0645084494493166e-05,
"loss": 0.125,
"step": 820
},
{
"epoch": 2.97,
"grad_norm": 1.6488372225760455,
"learning_rate": 1.0625562418285483e-05,
"loss": 0.0856,
"step": 821
},
{
"epoch": 2.98,
"grad_norm": 2.204496057082104,
"learning_rate": 1.0606037948325686e-05,
"loss": 0.1177,
"step": 822
},
{
"epoch": 2.98,
"grad_norm": 3.018303589734971,
"learning_rate": 1.0586511159325332e-05,
"loss": 0.1294,
"step": 823
},
{
"epoch": 2.99,
"grad_norm": 3.9483671709663186,
"learning_rate": 1.0566982126004848e-05,
"loss": 0.1558,
"step": 824
},
{
"epoch": 2.99,
"grad_norm": 4.049023207699091,
"learning_rate": 1.0547450923093248e-05,
"loss": 0.1277,
"step": 825
},
{
"epoch": 2.99,
"grad_norm": 2.6010779766826935,
"learning_rate": 1.052791762532786e-05,
"loss": 0.1483,
"step": 826
},
{
"epoch": 3.0,
"grad_norm": 3.2969099287950385,
"learning_rate": 1.0508382307454013e-05,
"loss": 0.1291,
"step": 827
},
{
"epoch": 3.0,
"grad_norm": 7.521016410890929,
"learning_rate": 1.0488845044224774e-05,
"loss": 0.1446,
"step": 828
},
{
"epoch": 3.0,
"grad_norm": 2.7746271335369643,
"learning_rate": 1.0469305910400651e-05,
"loss": 0.1152,
"step": 829
},
{
"epoch": 3.01,
"grad_norm": 2.503472911411605,
"learning_rate": 1.0449764980749317e-05,
"loss": 0.1209,
"step": 830
},
{
"epoch": 3.01,
"grad_norm": 4.989852384432169,
"learning_rate": 1.0430222330045306e-05,
"loss": 0.1118,
"step": 831
},
{
"epoch": 3.01,
"grad_norm": 5.101365842693291,
"learning_rate": 1.0410678033069745e-05,
"loss": 0.1537,
"step": 832
},
{
"epoch": 3.02,
"grad_norm": 2.1016500861413885,
"learning_rate": 1.039113216461006e-05,
"loss": 0.0934,
"step": 833
},
{
"epoch": 3.02,
"grad_norm": 3.769379114344539,
"learning_rate": 1.0371584799459684e-05,
"loss": 0.1566,
"step": 834
},
{
"epoch": 3.03,
"grad_norm": 2.422947810670274,
"learning_rate": 1.0352036012417788e-05,
"loss": 0.1568,
"step": 835
},
{
"epoch": 3.03,
"grad_norm": 2.627030152793082,
"learning_rate": 1.0332485878288977e-05,
"loss": 0.1504,
"step": 836
},
{
"epoch": 3.03,
"grad_norm": 2.0242132903425296,
"learning_rate": 1.0312934471883007e-05,
"loss": 0.1321,
"step": 837
},
{
"epoch": 3.04,
"grad_norm": 2.706534950512403,
"learning_rate": 1.029338186801451e-05,
"loss": 0.1275,
"step": 838
},
{
"epoch": 3.04,
"grad_norm": 2.480492841544911,
"learning_rate": 1.0273828141502701e-05,
"loss": 0.1446,
"step": 839
},
{
"epoch": 3.04,
"grad_norm": 2.796571352376509,
"learning_rate": 1.0254273367171085e-05,
"loss": 0.1145,
"step": 840
},
{
"epoch": 3.05,
"grad_norm": 2.4558515055056893,
"learning_rate": 1.0234717619847181e-05,
"loss": 0.1289,
"step": 841
},
{
"epoch": 3.05,
"grad_norm": 3.0435993390412546,
"learning_rate": 1.0215160974362224e-05,
"loss": 0.1194,
"step": 842
},
{
"epoch": 3.05,
"grad_norm": 2.0750968956245606,
"learning_rate": 1.0195603505550892e-05,
"loss": 0.137,
"step": 843
},
{
"epoch": 3.06,
"grad_norm": 2.251552421329542,
"learning_rate": 1.0176045288251014e-05,
"loss": 0.0879,
"step": 844
},
{
"epoch": 3.06,
"grad_norm": 2.7520051243350743,
"learning_rate": 1.0156486397303284e-05,
"loss": 0.1469,
"step": 845
},
{
"epoch": 3.07,
"grad_norm": 1.5364115076968212,
"learning_rate": 1.0136926907550968e-05,
"loss": 0.0941,
"step": 846
},
{
"epoch": 3.07,
"grad_norm": 2.9525892000929086,
"learning_rate": 1.0117366893839626e-05,
"loss": 0.1302,
"step": 847
},
{
"epoch": 3.07,
"grad_norm": 2.8403962261072326,
"learning_rate": 1.0097806431016825e-05,
"loss": 0.1463,
"step": 848
},
{
"epoch": 3.08,
"grad_norm": 2.9328411197586712,
"learning_rate": 1.0078245593931853e-05,
"loss": 0.1399,
"step": 849
},
{
"epoch": 3.08,
"grad_norm": 2.1509439724109476,
"learning_rate": 1.0058684457435419e-05,
"loss": 0.1191,
"step": 850
},
{
"epoch": 3.08,
"grad_norm": 4.226341686273808,
"learning_rate": 1.0039123096379388e-05,
"loss": 0.1727,
"step": 851
},
{
"epoch": 3.09,
"grad_norm": 2.5346322318450034,
"learning_rate": 1.0019561585616485e-05,
"loss": 0.1087,
"step": 852
},
{
"epoch": 3.09,
"grad_norm": 2.285971644224022,
"learning_rate": 1e-05,
"loss": 0.1271,
"step": 853
},
{
"epoch": 3.09,
"grad_norm": 3.762171370216196,
"learning_rate": 9.980438414383518e-06,
"loss": 0.1438,
"step": 854
},
{
"epoch": 3.1,
"grad_norm": 2.592486428756341,
"learning_rate": 9.960876903620614e-06,
"loss": 0.1404,
"step": 855
},
{
"epoch": 3.1,
"grad_norm": 1.7624009252093233,
"learning_rate": 9.941315542564583e-06,
"loss": 0.1122,
"step": 856
},
{
"epoch": 3.11,
"grad_norm": 1.9192938052793418,
"learning_rate": 9.92175440606815e-06,
"loss": 0.1297,
"step": 857
},
{
"epoch": 3.11,
"grad_norm": 2.425000066412495,
"learning_rate": 9.902193568983177e-06,
"loss": 0.1192,
"step": 858
},
{
"epoch": 3.11,
"grad_norm": 3.5557215609232777,
"learning_rate": 9.882633106160377e-06,
"loss": 0.1262,
"step": 859
},
{
"epoch": 3.12,
"grad_norm": 1.6285117231596429,
"learning_rate": 9.863073092449033e-06,
"loss": 0.0993,
"step": 860
},
{
"epoch": 3.12,
"grad_norm": 2.9836089506100527,
"learning_rate": 9.843513602696717e-06,
"loss": 0.0971,
"step": 861
},
{
"epoch": 3.12,
"grad_norm": 3.4998856846183584,
"learning_rate": 9.823954711748987e-06,
"loss": 0.1138,
"step": 862
},
{
"epoch": 3.13,
"grad_norm": 2.6057450722798823,
"learning_rate": 9.80439649444911e-06,
"loss": 0.159,
"step": 863
},
{
"epoch": 3.13,
"grad_norm": 2.7988449670156896,
"learning_rate": 9.78483902563778e-06,
"loss": 0.1232,
"step": 864
},
{
"epoch": 3.13,
"grad_norm": 2.159254429419325,
"learning_rate": 9.76528238015282e-06,
"loss": 0.1404,
"step": 865
},
{
"epoch": 3.14,
"grad_norm": 2.72441087365472,
"learning_rate": 9.745726632828913e-06,
"loss": 0.1157,
"step": 866
},
{
"epoch": 3.14,
"grad_norm": 5.251457256016179,
"learning_rate": 9.726171858497297e-06,
"loss": 0.1316,
"step": 867
},
{
"epoch": 3.14,
"grad_norm": 3.358081729074561,
"learning_rate": 9.706618131985489e-06,
"loss": 0.1528,
"step": 868
},
{
"epoch": 3.15,
"grad_norm": 3.739333861254032,
"learning_rate": 9.687065528116996e-06,
"loss": 0.1427,
"step": 869
},
{
"epoch": 3.15,
"grad_norm": 3.3341234083630154,
"learning_rate": 9.667514121711025e-06,
"loss": 0.1434,
"step": 870
},
{
"epoch": 3.16,
"grad_norm": 3.9910243031387322,
"learning_rate": 9.647963987582212e-06,
"loss": 0.1734,
"step": 871
},
{
"epoch": 3.16,
"grad_norm": 2.4751044620978386,
"learning_rate": 9.628415200540317e-06,
"loss": 0.1638,
"step": 872
},
{
"epoch": 3.16,
"grad_norm": 3.2151625865660622,
"learning_rate": 9.608867835389943e-06,
"loss": 0.0991,
"step": 873
},
{
"epoch": 3.17,
"grad_norm": 2.050505065138004,
"learning_rate": 9.589321966930255e-06,
"loss": 0.1425,
"step": 874
},
{
"epoch": 3.17,
"grad_norm": 5.714400327262755,
"learning_rate": 9.569777669954694e-06,
"loss": 0.1296,
"step": 875
},
{
"epoch": 3.17,
"grad_norm": 4.068545396635205,
"learning_rate": 9.550235019250688e-06,
"loss": 0.1246,
"step": 876
},
{
"epoch": 3.18,
"grad_norm": 4.985889593405149,
"learning_rate": 9.530694089599352e-06,
"loss": 0.1523,
"step": 877
},
{
"epoch": 3.18,
"grad_norm": 1.6430696320161076,
"learning_rate": 9.51115495577523e-06,
"loss": 0.1001,
"step": 878
},
{
"epoch": 3.18,
"grad_norm": 1.954482721811444,
"learning_rate": 9.491617692545992e-06,
"loss": 0.1406,
"step": 879
},
{
"epoch": 3.19,
"grad_norm": 2.7044752892563295,
"learning_rate": 9.472082374672145e-06,
"loss": 0.1586,
"step": 880
},
{
"epoch": 3.19,
"grad_norm": 2.231076930402335,
"learning_rate": 9.452549076906755e-06,
"loss": 0.1238,
"step": 881
},
{
"epoch": 3.2,
"grad_norm": 1.9953863011580084,
"learning_rate": 9.433017873995159e-06,
"loss": 0.1152,
"step": 882
},
{
"epoch": 3.2,
"grad_norm": 3.19721787254886,
"learning_rate": 9.413488840674674e-06,
"loss": 0.1232,
"step": 883
},
{
"epoch": 3.2,
"grad_norm": 2.556877803087002,
"learning_rate": 9.393962051674319e-06,
"loss": 0.1219,
"step": 884
},
{
"epoch": 3.21,
"grad_norm": 3.087032793077124,
"learning_rate": 9.374437581714523e-06,
"loss": 0.121,
"step": 885
},
{
"epoch": 3.21,
"grad_norm": 4.557098944942871,
"learning_rate": 9.354915505506839e-06,
"loss": 0.1115,
"step": 886
},
{
"epoch": 3.21,
"grad_norm": 4.378434203901308,
"learning_rate": 9.335395897753666e-06,
"loss": 0.1437,
"step": 887
},
{
"epoch": 3.22,
"grad_norm": 4.066606657793286,
"learning_rate": 9.315878833147953e-06,
"loss": 0.1152,
"step": 888
},
{
"epoch": 3.22,
"grad_norm": 2.3245834907089438,
"learning_rate": 9.296364386372918e-06,
"loss": 0.1084,
"step": 889
},
{
"epoch": 3.22,
"grad_norm": 3.170010357615001,
"learning_rate": 9.27685263210176e-06,
"loss": 0.148,
"step": 890
},
{
"epoch": 3.23,
"grad_norm": 2.9743249642039773,
"learning_rate": 9.257343644997381e-06,
"loss": 0.1657,
"step": 891
},
{
"epoch": 3.23,
"grad_norm": 5.4428836550892346,
"learning_rate": 9.237837499712088e-06,
"loss": 0.1298,
"step": 892
},
{
"epoch": 3.24,
"grad_norm": 3.1902337963099145,
"learning_rate": 9.218334270887316e-06,
"loss": 0.1243,
"step": 893
},
{
"epoch": 3.24,
"grad_norm": 1.6894082283835923,
"learning_rate": 9.19883403315334e-06,
"loss": 0.0869,
"step": 894
},
{
"epoch": 3.24,
"grad_norm": 3.9967215576278963,
"learning_rate": 9.179336861128987e-06,
"loss": 0.1099,
"step": 895
},
{
"epoch": 3.25,
"grad_norm": 2.494250107497,
"learning_rate": 9.159842829421358e-06,
"loss": 0.0927,
"step": 896
},
{
"epoch": 3.25,
"grad_norm": 2.220453898319641,
"learning_rate": 9.140352012625538e-06,
"loss": 0.0954,
"step": 897
},
{
"epoch": 3.25,
"grad_norm": 1.8585494898212889,
"learning_rate": 9.1208644853243e-06,
"loss": 0.0819,
"step": 898
},
{
"epoch": 3.26,
"grad_norm": 2.2204249039070514,
"learning_rate": 9.101380322087837e-06,
"loss": 0.0749,
"step": 899
},
{
"epoch": 3.26,
"grad_norm": 4.291342318127167,
"learning_rate": 9.081899597473469e-06,
"loss": 0.1208,
"step": 900
},
{
"epoch": 3.26,
"grad_norm": 2.2154186516648853,
"learning_rate": 9.06242238602536e-06,
"loss": 0.1011,
"step": 901
},
{
"epoch": 3.27,
"grad_norm": 2.8799603623281826,
"learning_rate": 9.042948762274227e-06,
"loss": 0.1121,
"step": 902
},
{
"epoch": 3.27,
"grad_norm": 3.141717635263762,
"learning_rate": 9.023478800737056e-06,
"loss": 0.1083,
"step": 903
},
{
"epoch": 3.28,
"grad_norm": 2.8267039279104207,
"learning_rate": 9.004012575916825e-06,
"loss": 0.0846,
"step": 904
},
{
"epoch": 3.28,
"grad_norm": 3.7032607732393883,
"learning_rate": 8.984550162302211e-06,
"loss": 0.1043,
"step": 905
},
{
"epoch": 3.28,
"grad_norm": 2.9401418991396793,
"learning_rate": 8.965091634367306e-06,
"loss": 0.0996,
"step": 906
},
{
"epoch": 3.29,
"grad_norm": 3.270486472986497,
"learning_rate": 8.945637066571337e-06,
"loss": 0.0688,
"step": 907
},
{
"epoch": 3.29,
"grad_norm": 4.689647025961772,
"learning_rate": 8.92618653335837e-06,
"loss": 0.0861,
"step": 908
},
{
"epoch": 3.29,
"grad_norm": 2.759321778364716,
"learning_rate": 8.90674010915704e-06,
"loss": 0.0763,
"step": 909
},
{
"epoch": 3.3,
"grad_norm": 3.623678804797402,
"learning_rate": 8.887297868380255e-06,
"loss": 0.1029,
"step": 910
},
{
"epoch": 3.3,
"grad_norm": 3.222924533432378,
"learning_rate": 8.867859885424915e-06,
"loss": 0.0955,
"step": 911
},
{
"epoch": 3.3,
"grad_norm": 5.9486353523383375,
"learning_rate": 8.84842623467163e-06,
"loss": 0.0838,
"step": 912
},
{
"epoch": 3.31,
"grad_norm": 3.5182081825185385,
"learning_rate": 8.828996990484425e-06,
"loss": 0.1039,
"step": 913
},
{
"epoch": 3.31,
"grad_norm": 4.536580119862204,
"learning_rate": 8.809572227210472e-06,
"loss": 0.0969,
"step": 914
},
{
"epoch": 3.32,
"grad_norm": 2.277344712508898,
"learning_rate": 8.790152019179794e-06,
"loss": 0.0895,
"step": 915
},
{
"epoch": 3.32,
"grad_norm": 5.668867235184441,
"learning_rate": 8.770736440704979e-06,
"loss": 0.1596,
"step": 916
},
{
"epoch": 3.32,
"grad_norm": 3.6328993424164415,
"learning_rate": 8.751325566080905e-06,
"loss": 0.1252,
"step": 917
},
{
"epoch": 3.33,
"grad_norm": 3.1744558717883855,
"learning_rate": 8.731919469584443e-06,
"loss": 0.0791,
"step": 918
},
{
"epoch": 3.33,
"grad_norm": 3.7579305119348754,
"learning_rate": 8.712518225474191e-06,
"loss": 0.1331,
"step": 919
},
{
"epoch": 3.33,
"grad_norm": 4.125964433544078,
"learning_rate": 8.693121907990177e-06,
"loss": 0.1353,
"step": 920
},
{
"epoch": 3.34,
"grad_norm": 3.370157612596519,
"learning_rate": 8.673730591353559e-06,
"loss": 0.1312,
"step": 921
},
{
"epoch": 3.34,
"grad_norm": 2.8376369760631097,
"learning_rate": 8.654344349766384e-06,
"loss": 0.0966,
"step": 922
},
{
"epoch": 3.34,
"grad_norm": 3.8923297715397966,
"learning_rate": 8.634963257411266e-06,
"loss": 0.14,
"step": 923
},
{
"epoch": 3.35,
"grad_norm": 2.778872787089105,
"learning_rate": 8.615587388451116e-06,
"loss": 0.153,
"step": 924
},
{
"epoch": 3.35,
"grad_norm": 3.6919755017122267,
"learning_rate": 8.59621681702886e-06,
"loss": 0.1919,
"step": 925
},
{
"epoch": 3.36,
"grad_norm": 2.5356430790154016,
"learning_rate": 8.576851617267151e-06,
"loss": 0.1215,
"step": 926
},
{
"epoch": 3.36,
"grad_norm": 3.352088530955654,
"learning_rate": 8.557491863268088e-06,
"loss": 0.1371,
"step": 927
},
{
"epoch": 3.36,
"grad_norm": 2.026956267831763,
"learning_rate": 8.53813762911293e-06,
"loss": 0.1086,
"step": 928
},
{
"epoch": 3.37,
"grad_norm": 3.087795837752131,
"learning_rate": 8.518788988861815e-06,
"loss": 0.1163,
"step": 929
},
{
"epoch": 3.37,
"grad_norm": 2.734278815496621,
"learning_rate": 8.499446016553475e-06,
"loss": 0.1262,
"step": 930
},
{
"epoch": 3.37,
"grad_norm": 1.4827892129376603,
"learning_rate": 8.480108786204956e-06,
"loss": 0.1029,
"step": 931
},
{
"epoch": 3.38,
"grad_norm": 3.7153729607054373,
"learning_rate": 8.460777371811327e-06,
"loss": 0.1248,
"step": 932
},
{
"epoch": 3.38,
"grad_norm": 1.9463462730942114,
"learning_rate": 8.441451847345407e-06,
"loss": 0.1276,
"step": 933
},
{
"epoch": 3.38,
"grad_norm": 2.39746437004284,
"learning_rate": 8.42213228675747e-06,
"loss": 0.104,
"step": 934
},
{
"epoch": 3.39,
"grad_norm": 2.9019853243357323,
"learning_rate": 8.402818763974978e-06,
"loss": 0.1404,
"step": 935
},
{
"epoch": 3.39,
"grad_norm": 3.316510882049375,
"learning_rate": 8.383511352902285e-06,
"loss": 0.123,
"step": 936
},
{
"epoch": 3.39,
"grad_norm": 4.45987374379237,
"learning_rate": 8.364210127420353e-06,
"loss": 0.1448,
"step": 937
},
{
"epoch": 3.4,
"grad_norm": 3.3388940872208988,
"learning_rate": 8.344915161386485e-06,
"loss": 0.0981,
"step": 938
},
{
"epoch": 3.4,
"grad_norm": 3.507666488522441,
"learning_rate": 8.325626528634016e-06,
"loss": 0.1128,
"step": 939
},
{
"epoch": 3.41,
"grad_norm": 2.5575931258105378,
"learning_rate": 8.306344302972066e-06,
"loss": 0.1281,
"step": 940
},
{
"epoch": 3.41,
"grad_norm": 2.4145418113500163,
"learning_rate": 8.287068558185225e-06,
"loss": 0.1386,
"step": 941
},
{
"epoch": 3.41,
"grad_norm": 2.1059955109379964,
"learning_rate": 8.267799368033288e-06,
"loss": 0.0773,
"step": 942
},
{
"epoch": 3.42,
"grad_norm": 2.9284128398528977,
"learning_rate": 8.248536806250958e-06,
"loss": 0.125,
"step": 943
},
{
"epoch": 3.42,
"grad_norm": 3.798647088081658,
"learning_rate": 8.229280946547595e-06,
"loss": 0.0952,
"step": 944
},
{
"epoch": 3.42,
"grad_norm": 3.7971420986887936,
"learning_rate": 8.210031862606896e-06,
"loss": 0.1174,
"step": 945
},
{
"epoch": 3.43,
"grad_norm": 4.4350462780310975,
"learning_rate": 8.190789628086632e-06,
"loss": 0.1513,
"step": 946
},
{
"epoch": 3.43,
"grad_norm": 3.0237165584661247,
"learning_rate": 8.171554316618374e-06,
"loss": 0.1024,
"step": 947
},
{
"epoch": 3.43,
"grad_norm": 3.7262130451250446,
"learning_rate": 8.15232600180719e-06,
"loss": 0.1436,
"step": 948
},
{
"epoch": 3.44,
"grad_norm": 2.8803876073941224,
"learning_rate": 8.133104757231381e-06,
"loss": 0.1137,
"step": 949
},
{
"epoch": 3.44,
"grad_norm": 2.4558420470179225,
"learning_rate": 8.113890656442194e-06,
"loss": 0.1126,
"step": 950
},
{
"epoch": 3.45,
"grad_norm": 2.392366218086255,
"learning_rate": 8.094683772963537e-06,
"loss": 0.1201,
"step": 951
},
{
"epoch": 3.45,
"grad_norm": 2.9831404455847785,
"learning_rate": 8.075484180291702e-06,
"loss": 0.1063,
"step": 952
},
{
"epoch": 3.45,
"grad_norm": 2.826548310448461,
"learning_rate": 8.056291951895079e-06,
"loss": 0.1187,
"step": 953
},
{
"epoch": 3.46,
"grad_norm": 4.27161161174605,
"learning_rate": 8.037107161213886e-06,
"loss": 0.1192,
"step": 954
},
{
"epoch": 3.46,
"grad_norm": 4.976367947641533,
"learning_rate": 8.017929881659874e-06,
"loss": 0.1512,
"step": 955
},
{
"epoch": 3.46,
"grad_norm": 3.039712986640314,
"learning_rate": 7.99876018661605e-06,
"loss": 0.1401,
"step": 956
},
{
"epoch": 3.47,
"grad_norm": 3.211419500673735,
"learning_rate": 7.979598149436404e-06,
"loss": 0.0895,
"step": 957
},
{
"epoch": 3.47,
"grad_norm": 3.9885902165400418,
"learning_rate": 7.960443843445622e-06,
"loss": 0.1327,
"step": 958
},
{
"epoch": 3.47,
"grad_norm": 2.6607525975325776,
"learning_rate": 7.941297341938803e-06,
"loss": 0.1471,
"step": 959
},
{
"epoch": 3.48,
"grad_norm": 2.107695072062342,
"learning_rate": 7.922158718181184e-06,
"loss": 0.1243,
"step": 960
},
{
"epoch": 3.48,
"grad_norm": 3.3554052153258462,
"learning_rate": 7.903028045407858e-06,
"loss": 0.1187,
"step": 961
},
{
"epoch": 3.49,
"grad_norm": 3.057712546839428,
"learning_rate": 7.883905396823487e-06,
"loss": 0.1382,
"step": 962
},
{
"epoch": 3.49,
"grad_norm": 5.305491453527781,
"learning_rate": 7.86479084560204e-06,
"loss": 0.1308,
"step": 963
},
{
"epoch": 3.49,
"grad_norm": 3.716016528734415,
"learning_rate": 7.845684464886487e-06,
"loss": 0.1244,
"step": 964
},
{
"epoch": 3.5,
"grad_norm": 2.9596409781438693,
"learning_rate": 7.826586327788546e-06,
"loss": 0.1009,
"step": 965
},
{
"epoch": 3.5,
"grad_norm": 3.918947251962521,
"learning_rate": 7.80749650738838e-06,
"loss": 0.1137,
"step": 966
},
{
"epoch": 3.5,
"grad_norm": 2.799796610731959,
"learning_rate": 7.788415076734332e-06,
"loss": 0.1044,
"step": 967
},
{
"epoch": 3.51,
"grad_norm": 3.109717304524271,
"learning_rate": 7.769342108842641e-06,
"loss": 0.1632,
"step": 968
},
{
"epoch": 3.51,
"grad_norm": 2.9002460157326264,
"learning_rate": 7.75027767669716e-06,
"loss": 0.1258,
"step": 969
},
{
"epoch": 3.51,
"grad_norm": 2.8964520967979692,
"learning_rate": 7.731221853249089e-06,
"loss": 0.1289,
"step": 970
},
{
"epoch": 3.52,
"grad_norm": 3.2852566235883196,
"learning_rate": 7.712174711416675e-06,
"loss": 0.1559,
"step": 971
},
{
"epoch": 3.52,
"grad_norm": 2.808680050434026,
"learning_rate": 7.693136324084949e-06,
"loss": 0.1106,
"step": 972
},
{
"epoch": 3.53,
"grad_norm": 2.1171654735689076,
"learning_rate": 7.674106764105442e-06,
"loss": 0.0848,
"step": 973
},
{
"epoch": 3.53,
"grad_norm": 2.430199608103024,
"learning_rate": 7.655086104295904e-06,
"loss": 0.0993,
"step": 974
},
{
"epoch": 3.53,
"grad_norm": 1.8828805141022391,
"learning_rate": 7.636074417440036e-06,
"loss": 0.0905,
"step": 975
},
{
"epoch": 3.54,
"grad_norm": 1.7733655894828702,
"learning_rate": 7.617071776287196e-06,
"loss": 0.1263,
"step": 976
},
{
"epoch": 3.54,
"grad_norm": 2.7250039443873773,
"learning_rate": 7.598078253552129e-06,
"loss": 0.0973,
"step": 977
},
{
"epoch": 3.54,
"grad_norm": 2.7007235642921934,
"learning_rate": 7.5790939219146874e-06,
"loss": 0.0961,
"step": 978
},
{
"epoch": 3.55,
"grad_norm": 2.1500766229328083,
"learning_rate": 7.5601188540195595e-06,
"loss": 0.1183,
"step": 979
},
{
"epoch": 3.55,
"grad_norm": 3.8280992040940123,
"learning_rate": 7.541153122475978e-06,
"loss": 0.1369,
"step": 980
},
{
"epoch": 3.55,
"grad_norm": 2.499330376660129,
"learning_rate": 7.522196799857453e-06,
"loss": 0.1126,
"step": 981
},
{
"epoch": 3.56,
"grad_norm": 1.739850187704323,
"learning_rate": 7.503249958701489e-06,
"loss": 0.0658,
"step": 982
},
{
"epoch": 3.56,
"grad_norm": 2.7369026119660806,
"learning_rate": 7.484312671509307e-06,
"loss": 0.0871,
"step": 983
},
{
"epoch": 3.57,
"grad_norm": 2.6596378118313466,
"learning_rate": 7.46538501074558e-06,
"loss": 0.0965,
"step": 984
},
{
"epoch": 3.57,
"grad_norm": 1.6417651435900409,
"learning_rate": 7.446467048838131e-06,
"loss": 0.0663,
"step": 985
},
{
"epoch": 3.57,
"grad_norm": 2.775971598647237,
"learning_rate": 7.427558858177679e-06,
"loss": 0.1038,
"step": 986
},
{
"epoch": 3.58,
"grad_norm": 2.4252559638844713,
"learning_rate": 7.408660511117547e-06,
"loss": 0.0925,
"step": 987
},
{
"epoch": 3.58,
"grad_norm": 3.503010606223773,
"learning_rate": 7.389772079973397e-06,
"loss": 0.1048,
"step": 988
},
{
"epoch": 3.58,
"grad_norm": 4.857436489741838,
"learning_rate": 7.37089363702294e-06,
"loss": 0.0894,
"step": 989
},
{
"epoch": 3.59,
"grad_norm": 2.1916188059776274,
"learning_rate": 7.352025254505672e-06,
"loss": 0.0624,
"step": 990
},
{
"epoch": 3.59,
"grad_norm": 2.7807935646365083,
"learning_rate": 7.333167004622593e-06,
"loss": 0.0847,
"step": 991
},
{
"epoch": 3.59,
"grad_norm": 4.040001908804561,
"learning_rate": 7.31431895953592e-06,
"loss": 0.1527,
"step": 992
},
{
"epoch": 3.6,
"grad_norm": 3.926769476612875,
"learning_rate": 7.295481191368836e-06,
"loss": 0.0891,
"step": 993
},
{
"epoch": 3.6,
"grad_norm": 3.254662559682786,
"learning_rate": 7.276653772205187e-06,
"loss": 0.0833,
"step": 994
},
{
"epoch": 3.61,
"grad_norm": 4.705257635176926,
"learning_rate": 7.257836774089222e-06,
"loss": 0.1193,
"step": 995
},
{
"epoch": 3.61,
"grad_norm": 2.893193298171004,
"learning_rate": 7.239030269025311e-06,
"loss": 0.1171,
"step": 996
},
{
"epoch": 3.61,
"grad_norm": 2.6770430406514327,
"learning_rate": 7.2202343289776775e-06,
"loss": 0.105,
"step": 997
},
{
"epoch": 3.62,
"grad_norm": 3.903790164902213,
"learning_rate": 7.201449025870113e-06,
"loss": 0.148,
"step": 998
},
{
"epoch": 3.62,
"grad_norm": 5.530297464086568,
"learning_rate": 7.182674431585703e-06,
"loss": 0.0866,
"step": 999
},
{
"epoch": 3.62,
"grad_norm": 4.869988050781526,
"learning_rate": 7.163910617966563e-06,
"loss": 0.1006,
"step": 1000
},
{
"epoch": 3.63,
"grad_norm": 3.579073892813218,
"learning_rate": 7.145157656813546e-06,
"loss": 0.0912,
"step": 1001
},
{
"epoch": 3.63,
"grad_norm": 2.35628309330303,
"learning_rate": 7.126415619885987e-06,
"loss": 0.0528,
"step": 1002
},
{
"epoch": 3.63,
"grad_norm": 4.049861882203114,
"learning_rate": 7.107684578901414e-06,
"loss": 0.1446,
"step": 1003
},
{
"epoch": 3.64,
"grad_norm": 3.2253320575037296,
"learning_rate": 7.088964605535278e-06,
"loss": 0.0971,
"step": 1004
},
{
"epoch": 3.64,
"grad_norm": 4.435165553701907,
"learning_rate": 7.0702557714206786e-06,
"loss": 0.1013,
"step": 1005
},
{
"epoch": 3.64,
"grad_norm": 4.651994900498167,
"learning_rate": 7.0515581481480925e-06,
"loss": 0.1181,
"step": 1006
},
{
"epoch": 3.65,
"grad_norm": 4.968879604252837,
"learning_rate": 7.032871807265097e-06,
"loss": 0.1307,
"step": 1007
},
{
"epoch": 3.65,
"grad_norm": 4.606076160778083,
"learning_rate": 7.014196820276098e-06,
"loss": 0.1174,
"step": 1008
},
{
"epoch": 3.66,
"grad_norm": 3.255916908644234,
"learning_rate": 6.99553325864205e-06,
"loss": 0.1248,
"step": 1009
},
{
"epoch": 3.66,
"grad_norm": 2.45046540729099,
"learning_rate": 6.976881193780196e-06,
"loss": 0.087,
"step": 1010
},
{
"epoch": 3.66,
"grad_norm": 2.3217962969736443,
"learning_rate": 6.958240697063774e-06,
"loss": 0.071,
"step": 1011
},
{
"epoch": 3.67,
"grad_norm": 2.4568876117053033,
"learning_rate": 6.9396118398217675e-06,
"loss": 0.0962,
"step": 1012
},
{
"epoch": 3.67,
"grad_norm": 2.2280111104466642,
"learning_rate": 6.920994693338612e-06,
"loss": 0.1103,
"step": 1013
},
{
"epoch": 3.67,
"grad_norm": 3.1209935926030083,
"learning_rate": 6.90238932885394e-06,
"loss": 0.1,
"step": 1014
},
{
"epoch": 3.68,
"grad_norm": 2.37659138818212,
"learning_rate": 6.883795817562293e-06,
"loss": 0.0838,
"step": 1015
},
{
"epoch": 3.68,
"grad_norm": 4.292882340244696,
"learning_rate": 6.865214230612858e-06,
"loss": 0.1078,
"step": 1016
},
{
"epoch": 3.68,
"grad_norm": 3.079448839621652,
"learning_rate": 6.846644639109186e-06,
"loss": 0.1048,
"step": 1017
},
{
"epoch": 3.69,
"grad_norm": 2.100105736333372,
"learning_rate": 6.8280871141089415e-06,
"loss": 0.0917,
"step": 1018
},
{
"epoch": 3.69,
"grad_norm": 3.498056667988704,
"learning_rate": 6.809541726623602e-06,
"loss": 0.0976,
"step": 1019
},
{
"epoch": 3.7,
"grad_norm": 3.102683380380855,
"learning_rate": 6.791008547618207e-06,
"loss": 0.1066,
"step": 1020
},
{
"epoch": 3.7,
"grad_norm": 2.9892418404379795,
"learning_rate": 6.772487648011075e-06,
"loss": 0.1073,
"step": 1021
},
{
"epoch": 3.7,
"grad_norm": 1.8806743914425466,
"learning_rate": 6.753979098673539e-06,
"loss": 0.1038,
"step": 1022
},
{
"epoch": 3.71,
"grad_norm": 3.122278074964517,
"learning_rate": 6.735482970429676e-06,
"loss": 0.0774,
"step": 1023
},
{
"epoch": 3.71,
"grad_norm": 3.9134486678550173,
"learning_rate": 6.716999334056031e-06,
"loss": 0.1493,
"step": 1024
}
],
"logging_steps": 1.0,
"max_steps": 1656,
"num_input_tokens_seen": 0,
"num_train_epochs": 6,
"save_steps": 512,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 0.0,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}