MultiPL-T-StarCoderBase_15b / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.3326133909287257,
"eval_steps": 500,
"global_step": 308,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"grad_norm": 2.1186674794018874,
"learning_rate": 0.0,
"loss": 0.762,
"step": 1
},
{
"epoch": 0.0,
"grad_norm": 1.5049784219658717,
"learning_rate": 6.020599913279623e-06,
"loss": 0.6777,
"step": 2
},
{
"epoch": 0.0,
"grad_norm": 1.5560748940122215,
"learning_rate": 9.542425094393249e-06,
"loss": 0.6918,
"step": 3
},
{
"epoch": 0.0,
"grad_norm": 1.2511342624485708,
"learning_rate": 1.2041199826559246e-05,
"loss": 0.6375,
"step": 4
},
{
"epoch": 0.01,
"grad_norm": 0.8125037046494756,
"learning_rate": 1.3979400086720374e-05,
"loss": 0.5701,
"step": 5
},
{
"epoch": 0.01,
"grad_norm": 1.8434809472279226,
"learning_rate": 1.5563025007672873e-05,
"loss": 0.6514,
"step": 6
},
{
"epoch": 0.01,
"grad_norm": 1.5763574654037487,
"learning_rate": 1.6901960800285137e-05,
"loss": 0.6088,
"step": 7
},
{
"epoch": 0.01,
"grad_norm": 0.9845853308357277,
"learning_rate": 1.806179973983887e-05,
"loss": 0.5696,
"step": 8
},
{
"epoch": 0.01,
"grad_norm": 0.9825649271082681,
"learning_rate": 1.9084850188786497e-05,
"loss": 0.5759,
"step": 9
},
{
"epoch": 0.01,
"grad_norm": 0.8132097372225459,
"learning_rate": 1.9999999999999998e-05,
"loss": 0.5595,
"step": 10
},
{
"epoch": 0.01,
"grad_norm": 1.319819296186598,
"learning_rate": 2e-05,
"loss": 0.5612,
"step": 11
},
{
"epoch": 0.01,
"grad_norm": 0.8912280533965539,
"learning_rate": 1.997816593886463e-05,
"loss": 0.5426,
"step": 12
},
{
"epoch": 0.01,
"grad_norm": 0.5696945500526134,
"learning_rate": 1.995633187772926e-05,
"loss": 0.5468,
"step": 13
},
{
"epoch": 0.02,
"grad_norm": 1.292370522394383,
"learning_rate": 1.993449781659389e-05,
"loss": 0.5656,
"step": 14
},
{
"epoch": 0.02,
"grad_norm": 0.5804040423506179,
"learning_rate": 1.9912663755458517e-05,
"loss": 0.5344,
"step": 15
},
{
"epoch": 0.02,
"grad_norm": 1.0611806419542331,
"learning_rate": 1.9890829694323144e-05,
"loss": 0.5549,
"step": 16
},
{
"epoch": 0.02,
"grad_norm": 0.5684069321913675,
"learning_rate": 1.9868995633187776e-05,
"loss": 0.5271,
"step": 17
},
{
"epoch": 0.02,
"grad_norm": 0.8373449011138275,
"learning_rate": 1.9847161572052404e-05,
"loss": 0.5529,
"step": 18
},
{
"epoch": 0.02,
"grad_norm": 0.6233456890603677,
"learning_rate": 1.982532751091703e-05,
"loss": 0.5498,
"step": 19
},
{
"epoch": 0.02,
"grad_norm": 0.5892227390005575,
"learning_rate": 1.980349344978166e-05,
"loss": 0.5275,
"step": 20
},
{
"epoch": 0.02,
"grad_norm": 0.5360881738197603,
"learning_rate": 1.978165938864629e-05,
"loss": 0.5459,
"step": 21
},
{
"epoch": 0.02,
"grad_norm": 0.5577347292500204,
"learning_rate": 1.975982532751092e-05,
"loss": 0.533,
"step": 22
},
{
"epoch": 0.02,
"grad_norm": 0.6845128012101741,
"learning_rate": 1.9737991266375546e-05,
"loss": 0.5496,
"step": 23
},
{
"epoch": 0.03,
"grad_norm": 0.42643509104346145,
"learning_rate": 1.9716157205240178e-05,
"loss": 0.5112,
"step": 24
},
{
"epoch": 0.03,
"grad_norm": 0.5149003630149541,
"learning_rate": 1.9694323144104806e-05,
"loss": 0.4802,
"step": 25
},
{
"epoch": 0.03,
"grad_norm": 0.5163933779775761,
"learning_rate": 1.9672489082969434e-05,
"loss": 0.4877,
"step": 26
},
{
"epoch": 0.03,
"grad_norm": 0.5207228225450767,
"learning_rate": 1.965065502183406e-05,
"loss": 0.5513,
"step": 27
},
{
"epoch": 0.03,
"grad_norm": 0.4777497059069117,
"learning_rate": 1.9628820960698693e-05,
"loss": 0.532,
"step": 28
},
{
"epoch": 0.03,
"grad_norm": 0.4794666570948325,
"learning_rate": 1.960698689956332e-05,
"loss": 0.5131,
"step": 29
},
{
"epoch": 0.03,
"grad_norm": 0.4763149650448877,
"learning_rate": 1.958515283842795e-05,
"loss": 0.5193,
"step": 30
},
{
"epoch": 0.03,
"grad_norm": 0.47104986565242496,
"learning_rate": 1.9563318777292576e-05,
"loss": 0.486,
"step": 31
},
{
"epoch": 0.03,
"grad_norm": 0.44186554703050224,
"learning_rate": 1.9541484716157208e-05,
"loss": 0.5062,
"step": 32
},
{
"epoch": 0.04,
"grad_norm": 0.5577375612757014,
"learning_rate": 1.9519650655021836e-05,
"loss": 0.5142,
"step": 33
},
{
"epoch": 0.04,
"grad_norm": 0.4466514535111405,
"learning_rate": 1.9497816593886463e-05,
"loss": 0.5105,
"step": 34
},
{
"epoch": 0.04,
"grad_norm": 0.4599752241697936,
"learning_rate": 1.9475982532751095e-05,
"loss": 0.5162,
"step": 35
},
{
"epoch": 0.04,
"grad_norm": 0.44857709087715164,
"learning_rate": 1.9454148471615723e-05,
"loss": 0.5367,
"step": 36
},
{
"epoch": 0.04,
"grad_norm": 0.41263712857832235,
"learning_rate": 1.943231441048035e-05,
"loss": 0.5027,
"step": 37
},
{
"epoch": 0.04,
"grad_norm": 0.4521119533509463,
"learning_rate": 1.941048034934498e-05,
"loss": 0.5118,
"step": 38
},
{
"epoch": 0.04,
"grad_norm": 0.4662594115199691,
"learning_rate": 1.938864628820961e-05,
"loss": 0.5002,
"step": 39
},
{
"epoch": 0.04,
"grad_norm": 0.4446567090191832,
"learning_rate": 1.9366812227074238e-05,
"loss": 0.4839,
"step": 40
},
{
"epoch": 0.04,
"grad_norm": 0.48251939475160455,
"learning_rate": 1.9344978165938865e-05,
"loss": 0.4929,
"step": 41
},
{
"epoch": 0.05,
"grad_norm": 0.5540231836852683,
"learning_rate": 1.9323144104803497e-05,
"loss": 0.5149,
"step": 42
},
{
"epoch": 0.05,
"grad_norm": 0.43090837602909066,
"learning_rate": 1.9301310043668125e-05,
"loss": 0.4932,
"step": 43
},
{
"epoch": 0.05,
"grad_norm": 0.5198842620428126,
"learning_rate": 1.9279475982532753e-05,
"loss": 0.5107,
"step": 44
},
{
"epoch": 0.05,
"grad_norm": 0.45405522592051506,
"learning_rate": 1.925764192139738e-05,
"loss": 0.5104,
"step": 45
},
{
"epoch": 0.05,
"grad_norm": 0.4563593041116948,
"learning_rate": 1.9235807860262012e-05,
"loss": 0.4915,
"step": 46
},
{
"epoch": 0.05,
"grad_norm": 0.4652171198209756,
"learning_rate": 1.921397379912664e-05,
"loss": 0.4773,
"step": 47
},
{
"epoch": 0.05,
"grad_norm": 0.5075515663557973,
"learning_rate": 1.9192139737991267e-05,
"loss": 0.4918,
"step": 48
},
{
"epoch": 0.05,
"grad_norm": 0.4876926842169163,
"learning_rate": 1.9170305676855895e-05,
"loss": 0.5052,
"step": 49
},
{
"epoch": 0.05,
"grad_norm": 0.5657178444298022,
"learning_rate": 1.9148471615720527e-05,
"loss": 0.4877,
"step": 50
},
{
"epoch": 0.06,
"grad_norm": 0.4224792144955699,
"learning_rate": 1.9126637554585155e-05,
"loss": 0.4844,
"step": 51
},
{
"epoch": 0.06,
"grad_norm": 0.47793247734973493,
"learning_rate": 1.9104803493449782e-05,
"loss": 0.4946,
"step": 52
},
{
"epoch": 0.06,
"grad_norm": 0.47163318223121553,
"learning_rate": 1.908296943231441e-05,
"loss": 0.4777,
"step": 53
},
{
"epoch": 0.06,
"grad_norm": 0.4468196827989752,
"learning_rate": 1.906113537117904e-05,
"loss": 0.4912,
"step": 54
},
{
"epoch": 0.06,
"grad_norm": 0.42476515311951113,
"learning_rate": 1.903930131004367e-05,
"loss": 0.4904,
"step": 55
},
{
"epoch": 0.06,
"grad_norm": 0.4301592924354783,
"learning_rate": 1.9017467248908297e-05,
"loss": 0.4843,
"step": 56
},
{
"epoch": 0.06,
"grad_norm": 0.45155900802054094,
"learning_rate": 1.899563318777293e-05,
"loss": 0.4794,
"step": 57
},
{
"epoch": 0.06,
"grad_norm": 0.47557594241815077,
"learning_rate": 1.8973799126637557e-05,
"loss": 0.4915,
"step": 58
},
{
"epoch": 0.06,
"grad_norm": 0.41055652523585845,
"learning_rate": 1.8951965065502184e-05,
"loss": 0.4761,
"step": 59
},
{
"epoch": 0.06,
"grad_norm": 0.49673065092092256,
"learning_rate": 1.8930131004366816e-05,
"loss": 0.4876,
"step": 60
},
{
"epoch": 0.07,
"grad_norm": 0.4658799523135649,
"learning_rate": 1.8908296943231444e-05,
"loss": 0.4953,
"step": 61
},
{
"epoch": 0.07,
"grad_norm": 0.4184265481713602,
"learning_rate": 1.888646288209607e-05,
"loss": 0.4734,
"step": 62
},
{
"epoch": 0.07,
"grad_norm": 0.5665612896476564,
"learning_rate": 1.88646288209607e-05,
"loss": 0.5221,
"step": 63
},
{
"epoch": 0.07,
"grad_norm": 0.40809681069198844,
"learning_rate": 1.884279475982533e-05,
"loss": 0.4803,
"step": 64
},
{
"epoch": 0.07,
"grad_norm": 0.5235211035164702,
"learning_rate": 1.882096069868996e-05,
"loss": 0.4944,
"step": 65
},
{
"epoch": 0.07,
"grad_norm": 0.4966721954560684,
"learning_rate": 1.8799126637554586e-05,
"loss": 0.5025,
"step": 66
},
{
"epoch": 0.07,
"grad_norm": 0.43399894957700963,
"learning_rate": 1.8777292576419214e-05,
"loss": 0.4861,
"step": 67
},
{
"epoch": 0.07,
"grad_norm": 0.6261012627094004,
"learning_rate": 1.8755458515283846e-05,
"loss": 0.491,
"step": 68
},
{
"epoch": 0.07,
"grad_norm": 0.5504631314192578,
"learning_rate": 1.8733624454148474e-05,
"loss": 0.4788,
"step": 69
},
{
"epoch": 0.08,
"grad_norm": 0.5063285566533242,
"learning_rate": 1.87117903930131e-05,
"loss": 0.4802,
"step": 70
},
{
"epoch": 0.08,
"grad_norm": 0.5795384217921828,
"learning_rate": 1.868995633187773e-05,
"loss": 0.4838,
"step": 71
},
{
"epoch": 0.08,
"grad_norm": 0.39411140991455096,
"learning_rate": 1.866812227074236e-05,
"loss": 0.4636,
"step": 72
},
{
"epoch": 0.08,
"grad_norm": 0.4230648674489383,
"learning_rate": 1.864628820960699e-05,
"loss": 0.4694,
"step": 73
},
{
"epoch": 0.08,
"grad_norm": 0.5138620814929178,
"learning_rate": 1.8624454148471616e-05,
"loss": 0.4865,
"step": 74
},
{
"epoch": 0.08,
"grad_norm": 0.41965115714343726,
"learning_rate": 1.8602620087336244e-05,
"loss": 0.4833,
"step": 75
},
{
"epoch": 0.08,
"grad_norm": 0.4149053560192593,
"learning_rate": 1.8580786026200876e-05,
"loss": 0.4786,
"step": 76
},
{
"epoch": 0.08,
"grad_norm": 0.46599988055432306,
"learning_rate": 1.8558951965065503e-05,
"loss": 0.497,
"step": 77
},
{
"epoch": 0.08,
"grad_norm": 0.4811313526242602,
"learning_rate": 1.853711790393013e-05,
"loss": 0.4485,
"step": 78
},
{
"epoch": 0.09,
"grad_norm": 0.4392560728996164,
"learning_rate": 1.8515283842794763e-05,
"loss": 0.434,
"step": 79
},
{
"epoch": 0.09,
"grad_norm": 0.45146186393597715,
"learning_rate": 1.849344978165939e-05,
"loss": 0.4857,
"step": 80
},
{
"epoch": 0.09,
"grad_norm": 0.4048483216039733,
"learning_rate": 1.847161572052402e-05,
"loss": 0.463,
"step": 81
},
{
"epoch": 0.09,
"grad_norm": 0.39375794039394735,
"learning_rate": 1.844978165938865e-05,
"loss": 0.4777,
"step": 82
},
{
"epoch": 0.09,
"grad_norm": 0.49842165141484956,
"learning_rate": 1.8427947598253278e-05,
"loss": 0.4743,
"step": 83
},
{
"epoch": 0.09,
"grad_norm": 0.45279844618971316,
"learning_rate": 1.8406113537117905e-05,
"loss": 0.4606,
"step": 84
},
{
"epoch": 0.09,
"grad_norm": 0.4186717031357162,
"learning_rate": 1.8384279475982533e-05,
"loss": 0.4869,
"step": 85
},
{
"epoch": 0.09,
"grad_norm": 0.41969330355451234,
"learning_rate": 1.8362445414847165e-05,
"loss": 0.4617,
"step": 86
},
{
"epoch": 0.09,
"grad_norm": 0.4496593802158418,
"learning_rate": 1.8340611353711792e-05,
"loss": 0.473,
"step": 87
},
{
"epoch": 0.1,
"grad_norm": 0.41451974130263514,
"learning_rate": 1.831877729257642e-05,
"loss": 0.4561,
"step": 88
},
{
"epoch": 0.1,
"grad_norm": 0.43177829462958645,
"learning_rate": 1.8296943231441048e-05,
"loss": 0.4473,
"step": 89
},
{
"epoch": 0.1,
"grad_norm": 0.5142765330070325,
"learning_rate": 1.827510917030568e-05,
"loss": 0.4651,
"step": 90
},
{
"epoch": 0.1,
"grad_norm": 0.5298650303411407,
"learning_rate": 1.8253275109170307e-05,
"loss": 0.4324,
"step": 91
},
{
"epoch": 0.1,
"grad_norm": 0.45728734983986014,
"learning_rate": 1.8231441048034935e-05,
"loss": 0.4609,
"step": 92
},
{
"epoch": 0.1,
"grad_norm": 0.476340414110513,
"learning_rate": 1.8209606986899563e-05,
"loss": 0.4864,
"step": 93
},
{
"epoch": 0.1,
"grad_norm": 0.5424768428870163,
"learning_rate": 1.8187772925764194e-05,
"loss": 0.4481,
"step": 94
},
{
"epoch": 0.1,
"grad_norm": 0.3967853775356532,
"learning_rate": 1.8165938864628822e-05,
"loss": 0.443,
"step": 95
},
{
"epoch": 0.1,
"grad_norm": 0.37481758528836623,
"learning_rate": 1.814410480349345e-05,
"loss": 0.4484,
"step": 96
},
{
"epoch": 0.1,
"grad_norm": 0.4467615010105185,
"learning_rate": 1.8122270742358078e-05,
"loss": 0.4644,
"step": 97
},
{
"epoch": 0.11,
"grad_norm": 0.3864694041939627,
"learning_rate": 1.810043668122271e-05,
"loss": 0.4442,
"step": 98
},
{
"epoch": 0.11,
"grad_norm": 0.44938445973951147,
"learning_rate": 1.8078602620087337e-05,
"loss": 0.4633,
"step": 99
},
{
"epoch": 0.11,
"grad_norm": 0.4153749251398598,
"learning_rate": 1.805676855895197e-05,
"loss": 0.4622,
"step": 100
},
{
"epoch": 0.11,
"grad_norm": 0.37908567525993214,
"learning_rate": 1.8034934497816597e-05,
"loss": 0.4557,
"step": 101
},
{
"epoch": 0.11,
"grad_norm": 0.43344971266237664,
"learning_rate": 1.8013100436681224e-05,
"loss": 0.4861,
"step": 102
},
{
"epoch": 0.11,
"grad_norm": 0.3897708901183335,
"learning_rate": 1.7991266375545852e-05,
"loss": 0.4314,
"step": 103
},
{
"epoch": 0.11,
"grad_norm": 0.38897087826119375,
"learning_rate": 1.7969432314410484e-05,
"loss": 0.4478,
"step": 104
},
{
"epoch": 0.11,
"grad_norm": 0.38883284489467035,
"learning_rate": 1.794759825327511e-05,
"loss": 0.4518,
"step": 105
},
{
"epoch": 0.11,
"grad_norm": 0.43206300394588104,
"learning_rate": 1.792576419213974e-05,
"loss": 0.4617,
"step": 106
},
{
"epoch": 0.12,
"grad_norm": 0.40125737231009045,
"learning_rate": 1.7903930131004367e-05,
"loss": 0.4502,
"step": 107
},
{
"epoch": 0.12,
"grad_norm": 0.435314612619678,
"learning_rate": 1.7882096069869e-05,
"loss": 0.4766,
"step": 108
},
{
"epoch": 0.12,
"grad_norm": 0.4144017071697519,
"learning_rate": 1.7860262008733626e-05,
"loss": 0.4665,
"step": 109
},
{
"epoch": 0.12,
"grad_norm": 0.4192066755628224,
"learning_rate": 1.7838427947598254e-05,
"loss": 0.4871,
"step": 110
},
{
"epoch": 0.12,
"grad_norm": 0.45215784643941886,
"learning_rate": 1.7816593886462882e-05,
"loss": 0.4745,
"step": 111
},
{
"epoch": 0.12,
"grad_norm": 0.3946414831281833,
"learning_rate": 1.7794759825327513e-05,
"loss": 0.4692,
"step": 112
},
{
"epoch": 0.12,
"grad_norm": 0.361997860030177,
"learning_rate": 1.777292576419214e-05,
"loss": 0.4554,
"step": 113
},
{
"epoch": 0.12,
"grad_norm": 0.40175416951250204,
"learning_rate": 1.775109170305677e-05,
"loss": 0.4536,
"step": 114
},
{
"epoch": 0.12,
"grad_norm": 0.47114297094100405,
"learning_rate": 1.7729257641921397e-05,
"loss": 0.4708,
"step": 115
},
{
"epoch": 0.13,
"grad_norm": 0.41885478146398897,
"learning_rate": 1.770742358078603e-05,
"loss": 0.451,
"step": 116
},
{
"epoch": 0.13,
"grad_norm": 0.4552034524733042,
"learning_rate": 1.7685589519650656e-05,
"loss": 0.4495,
"step": 117
},
{
"epoch": 0.13,
"grad_norm": 0.4294672314730056,
"learning_rate": 1.7663755458515288e-05,
"loss": 0.4377,
"step": 118
},
{
"epoch": 0.13,
"grad_norm": 0.41158434793213744,
"learning_rate": 1.7641921397379912e-05,
"loss": 0.4603,
"step": 119
},
{
"epoch": 0.13,
"grad_norm": 0.40828284256365754,
"learning_rate": 1.7620087336244543e-05,
"loss": 0.452,
"step": 120
},
{
"epoch": 0.13,
"grad_norm": 0.4116486419111368,
"learning_rate": 1.759825327510917e-05,
"loss": 0.4503,
"step": 121
},
{
"epoch": 0.13,
"grad_norm": 0.3981270422052198,
"learning_rate": 1.7576419213973803e-05,
"loss": 0.4606,
"step": 122
},
{
"epoch": 0.13,
"grad_norm": 0.4493126314995839,
"learning_rate": 1.755458515283843e-05,
"loss": 0.4667,
"step": 123
},
{
"epoch": 0.13,
"grad_norm": 0.424114780787477,
"learning_rate": 1.753275109170306e-05,
"loss": 0.4422,
"step": 124
},
{
"epoch": 0.13,
"grad_norm": 0.3852850414308942,
"learning_rate": 1.7510917030567686e-05,
"loss": 0.4429,
"step": 125
},
{
"epoch": 0.14,
"grad_norm": 0.4282509458412285,
"learning_rate": 1.7489082969432317e-05,
"loss": 0.4432,
"step": 126
},
{
"epoch": 0.14,
"grad_norm": 0.41473375647446675,
"learning_rate": 1.7467248908296945e-05,
"loss": 0.4681,
"step": 127
},
{
"epoch": 0.14,
"grad_norm": 0.42524655917736237,
"learning_rate": 1.7445414847161573e-05,
"loss": 0.4666,
"step": 128
},
{
"epoch": 0.14,
"grad_norm": 0.4577002723023361,
"learning_rate": 1.74235807860262e-05,
"loss": 0.4386,
"step": 129
},
{
"epoch": 0.14,
"grad_norm": 0.4495477549652464,
"learning_rate": 1.7401746724890832e-05,
"loss": 0.4456,
"step": 130
},
{
"epoch": 0.14,
"grad_norm": 0.4589218421705617,
"learning_rate": 1.737991266375546e-05,
"loss": 0.447,
"step": 131
},
{
"epoch": 0.14,
"grad_norm": 0.41129567959332086,
"learning_rate": 1.7358078602620088e-05,
"loss": 0.4714,
"step": 132
},
{
"epoch": 0.14,
"grad_norm": 0.4091662311389778,
"learning_rate": 1.7336244541484716e-05,
"loss": 0.4325,
"step": 133
},
{
"epoch": 0.14,
"grad_norm": 0.43554863694180995,
"learning_rate": 1.7314410480349347e-05,
"loss": 0.428,
"step": 134
},
{
"epoch": 0.15,
"grad_norm": 0.48782967094292995,
"learning_rate": 1.7292576419213975e-05,
"loss": 0.4614,
"step": 135
},
{
"epoch": 0.15,
"grad_norm": 0.410345941910886,
"learning_rate": 1.7270742358078607e-05,
"loss": 0.4358,
"step": 136
},
{
"epoch": 0.15,
"grad_norm": 0.4658797763961389,
"learning_rate": 1.724890829694323e-05,
"loss": 0.4228,
"step": 137
},
{
"epoch": 0.15,
"grad_norm": 0.46687534901658256,
"learning_rate": 1.7227074235807862e-05,
"loss": 0.4363,
"step": 138
},
{
"epoch": 0.15,
"grad_norm": 0.41452146680368457,
"learning_rate": 1.720524017467249e-05,
"loss": 0.4451,
"step": 139
},
{
"epoch": 0.15,
"grad_norm": 0.435633971396934,
"learning_rate": 1.718340611353712e-05,
"loss": 0.4234,
"step": 140
},
{
"epoch": 0.15,
"grad_norm": 0.3833328913948718,
"learning_rate": 1.7161572052401746e-05,
"loss": 0.4457,
"step": 141
},
{
"epoch": 0.15,
"grad_norm": 0.4362215880238739,
"learning_rate": 1.7139737991266377e-05,
"loss": 0.433,
"step": 142
},
{
"epoch": 0.15,
"grad_norm": 0.4495745203741214,
"learning_rate": 1.7117903930131005e-05,
"loss": 0.4275,
"step": 143
},
{
"epoch": 0.16,
"grad_norm": 0.4364392500220487,
"learning_rate": 1.7096069868995636e-05,
"loss": 0.4448,
"step": 144
},
{
"epoch": 0.16,
"grad_norm": 0.43234675029655206,
"learning_rate": 1.7074235807860264e-05,
"loss": 0.4333,
"step": 145
},
{
"epoch": 0.16,
"grad_norm": 0.38155397507175676,
"learning_rate": 1.7052401746724892e-05,
"loss": 0.4151,
"step": 146
},
{
"epoch": 0.16,
"grad_norm": 0.41784216844651545,
"learning_rate": 1.703056768558952e-05,
"loss": 0.4604,
"step": 147
},
{
"epoch": 0.16,
"grad_norm": 0.4282599403566064,
"learning_rate": 1.700873362445415e-05,
"loss": 0.4031,
"step": 148
},
{
"epoch": 0.16,
"grad_norm": 0.40487465599518896,
"learning_rate": 1.698689956331878e-05,
"loss": 0.4409,
"step": 149
},
{
"epoch": 0.16,
"grad_norm": 0.4694993863584187,
"learning_rate": 1.6965065502183407e-05,
"loss": 0.4351,
"step": 150
},
{
"epoch": 0.16,
"grad_norm": 0.40097074583417713,
"learning_rate": 1.6943231441048035e-05,
"loss": 0.4454,
"step": 151
},
{
"epoch": 0.16,
"grad_norm": 0.41500384012996383,
"learning_rate": 1.6921397379912666e-05,
"loss": 0.4133,
"step": 152
},
{
"epoch": 0.17,
"grad_norm": 0.4091240382988871,
"learning_rate": 1.6899563318777294e-05,
"loss": 0.4387,
"step": 153
},
{
"epoch": 0.17,
"grad_norm": 0.38499409045601,
"learning_rate": 1.6877729257641922e-05,
"loss": 0.4176,
"step": 154
},
{
"epoch": 0.17,
"grad_norm": 0.3923298254546311,
"learning_rate": 1.685589519650655e-05,
"loss": 0.4394,
"step": 155
},
{
"epoch": 0.17,
"grad_norm": 0.39382094470718404,
"learning_rate": 1.683406113537118e-05,
"loss": 0.4203,
"step": 156
},
{
"epoch": 0.17,
"grad_norm": 0.3947764097635848,
"learning_rate": 1.681222707423581e-05,
"loss": 0.4079,
"step": 157
},
{
"epoch": 0.17,
"grad_norm": 0.3842228774523482,
"learning_rate": 1.679039301310044e-05,
"loss": 0.4203,
"step": 158
},
{
"epoch": 0.17,
"grad_norm": 0.4362604428547085,
"learning_rate": 1.6768558951965065e-05,
"loss": 0.4214,
"step": 159
},
{
"epoch": 0.17,
"grad_norm": 0.3897244945668934,
"learning_rate": 1.6746724890829696e-05,
"loss": 0.4135,
"step": 160
},
{
"epoch": 0.17,
"grad_norm": 0.39055816078553107,
"learning_rate": 1.6724890829694324e-05,
"loss": 0.4264,
"step": 161
},
{
"epoch": 0.17,
"grad_norm": 0.4059738357585979,
"learning_rate": 1.6703056768558955e-05,
"loss": 0.4426,
"step": 162
},
{
"epoch": 0.18,
"grad_norm": 0.3719736409193062,
"learning_rate": 1.668122270742358e-05,
"loss": 0.3741,
"step": 163
},
{
"epoch": 0.18,
"grad_norm": 0.3994391137360452,
"learning_rate": 1.665938864628821e-05,
"loss": 0.3903,
"step": 164
},
{
"epoch": 0.18,
"grad_norm": 0.4102321554518753,
"learning_rate": 1.663755458515284e-05,
"loss": 0.3947,
"step": 165
},
{
"epoch": 0.18,
"grad_norm": 0.40605248638251906,
"learning_rate": 1.661572052401747e-05,
"loss": 0.374,
"step": 166
},
{
"epoch": 0.18,
"grad_norm": 0.39829795862216355,
"learning_rate": 1.6593886462882098e-05,
"loss": 0.3558,
"step": 167
},
{
"epoch": 0.18,
"grad_norm": 0.5668042067428868,
"learning_rate": 1.6572052401746726e-05,
"loss": 0.3966,
"step": 168
},
{
"epoch": 0.18,
"grad_norm": 0.37012620577733346,
"learning_rate": 1.6550218340611354e-05,
"loss": 0.3696,
"step": 169
},
{
"epoch": 0.18,
"grad_norm": 0.5230789664738862,
"learning_rate": 1.6528384279475985e-05,
"loss": 0.3746,
"step": 170
},
{
"epoch": 0.18,
"grad_norm": 0.38417092465063496,
"learning_rate": 1.6506550218340613e-05,
"loss": 0.3643,
"step": 171
},
{
"epoch": 0.19,
"grad_norm": 0.39278262301667943,
"learning_rate": 1.648471615720524e-05,
"loss": 0.3707,
"step": 172
},
{
"epoch": 0.19,
"grad_norm": 0.4445691345644359,
"learning_rate": 1.646288209606987e-05,
"loss": 0.3825,
"step": 173
},
{
"epoch": 0.19,
"grad_norm": 0.6480928907101309,
"learning_rate": 1.64410480349345e-05,
"loss": 0.3854,
"step": 174
},
{
"epoch": 0.19,
"grad_norm": 0.4272433035623177,
"learning_rate": 1.6419213973799128e-05,
"loss": 0.3703,
"step": 175
},
{
"epoch": 0.19,
"grad_norm": 0.3739993134928334,
"learning_rate": 1.639737991266376e-05,
"loss": 0.3701,
"step": 176
},
{
"epoch": 0.19,
"grad_norm": 0.4010063647136101,
"learning_rate": 1.6375545851528384e-05,
"loss": 0.3723,
"step": 177
},
{
"epoch": 0.19,
"grad_norm": 0.3790230325277792,
"learning_rate": 1.6353711790393015e-05,
"loss": 0.3676,
"step": 178
},
{
"epoch": 0.19,
"grad_norm": 0.3755499066206077,
"learning_rate": 1.6331877729257643e-05,
"loss": 0.3642,
"step": 179
},
{
"epoch": 0.19,
"grad_norm": 0.4241683578769766,
"learning_rate": 1.6310043668122274e-05,
"loss": 0.3899,
"step": 180
},
{
"epoch": 0.2,
"grad_norm": 0.38321915261506356,
"learning_rate": 1.62882096069869e-05,
"loss": 0.3614,
"step": 181
},
{
"epoch": 0.2,
"grad_norm": 0.3761870154387026,
"learning_rate": 1.626637554585153e-05,
"loss": 0.3655,
"step": 182
},
{
"epoch": 0.2,
"grad_norm": 0.39129047927836064,
"learning_rate": 1.6244541484716158e-05,
"loss": 0.383,
"step": 183
},
{
"epoch": 0.2,
"grad_norm": 0.4724242531647335,
"learning_rate": 1.622270742358079e-05,
"loss": 0.3654,
"step": 184
},
{
"epoch": 0.2,
"grad_norm": 0.4319860707229489,
"learning_rate": 1.6200873362445414e-05,
"loss": 0.3906,
"step": 185
},
{
"epoch": 0.2,
"grad_norm": 0.45729422543214604,
"learning_rate": 1.6179039301310045e-05,
"loss": 0.3765,
"step": 186
},
{
"epoch": 0.2,
"grad_norm": 0.45529560900836535,
"learning_rate": 1.6157205240174673e-05,
"loss": 0.3751,
"step": 187
},
{
"epoch": 0.2,
"grad_norm": 0.4503043781159598,
"learning_rate": 1.6135371179039304e-05,
"loss": 0.3687,
"step": 188
},
{
"epoch": 0.2,
"grad_norm": 0.45425963573071876,
"learning_rate": 1.6113537117903932e-05,
"loss": 0.3551,
"step": 189
},
{
"epoch": 0.21,
"grad_norm": 0.35503621037592853,
"learning_rate": 1.609170305676856e-05,
"loss": 0.349,
"step": 190
},
{
"epoch": 0.21,
"grad_norm": 0.3796166907322132,
"learning_rate": 1.6069868995633188e-05,
"loss": 0.3716,
"step": 191
},
{
"epoch": 0.21,
"grad_norm": 0.4173119812888326,
"learning_rate": 1.604803493449782e-05,
"loss": 0.3784,
"step": 192
},
{
"epoch": 0.21,
"grad_norm": 0.38778404930691235,
"learning_rate": 1.6026200873362447e-05,
"loss": 0.3828,
"step": 193
},
{
"epoch": 0.21,
"grad_norm": 0.376339388079517,
"learning_rate": 1.6004366812227075e-05,
"loss": 0.3787,
"step": 194
},
{
"epoch": 0.21,
"grad_norm": 0.375246423976208,
"learning_rate": 1.5982532751091703e-05,
"loss": 0.3528,
"step": 195
},
{
"epoch": 0.21,
"grad_norm": 0.37393557958350465,
"learning_rate": 1.5960698689956334e-05,
"loss": 0.3508,
"step": 196
},
{
"epoch": 0.21,
"grad_norm": 0.38427011175269715,
"learning_rate": 1.5938864628820962e-05,
"loss": 0.3537,
"step": 197
},
{
"epoch": 0.21,
"grad_norm": 0.38025086526439966,
"learning_rate": 1.5917030567685593e-05,
"loss": 0.3594,
"step": 198
},
{
"epoch": 0.21,
"grad_norm": 0.46667328798614865,
"learning_rate": 1.5895196506550218e-05,
"loss": 0.3601,
"step": 199
},
{
"epoch": 0.22,
"grad_norm": 0.4141132395623408,
"learning_rate": 1.587336244541485e-05,
"loss": 0.3586,
"step": 200
},
{
"epoch": 0.22,
"grad_norm": 0.3803714659154048,
"learning_rate": 1.5851528384279477e-05,
"loss": 0.363,
"step": 201
},
{
"epoch": 0.22,
"grad_norm": 0.4605657889095618,
"learning_rate": 1.582969432314411e-05,
"loss": 0.3538,
"step": 202
},
{
"epoch": 0.22,
"grad_norm": 0.4058624399636699,
"learning_rate": 1.5807860262008733e-05,
"loss": 0.3601,
"step": 203
},
{
"epoch": 0.22,
"grad_norm": 0.5379500589660902,
"learning_rate": 1.5786026200873364e-05,
"loss": 0.3669,
"step": 204
},
{
"epoch": 0.22,
"grad_norm": 0.4219898314844134,
"learning_rate": 1.5764192139737992e-05,
"loss": 0.353,
"step": 205
},
{
"epoch": 0.22,
"grad_norm": 0.38152400004537884,
"learning_rate": 1.5742358078602623e-05,
"loss": 0.3604,
"step": 206
},
{
"epoch": 0.22,
"grad_norm": 0.40015810285717957,
"learning_rate": 1.5720524017467248e-05,
"loss": 0.3619,
"step": 207
},
{
"epoch": 0.22,
"grad_norm": 0.41681499423940854,
"learning_rate": 1.569868995633188e-05,
"loss": 0.3357,
"step": 208
},
{
"epoch": 0.23,
"grad_norm": 0.4976226945041277,
"learning_rate": 1.5676855895196507e-05,
"loss": 0.3691,
"step": 209
},
{
"epoch": 0.23,
"grad_norm": 0.43800093718650573,
"learning_rate": 1.5655021834061138e-05,
"loss": 0.3606,
"step": 210
},
{
"epoch": 0.23,
"grad_norm": 0.3919587444642411,
"learning_rate": 1.5633187772925766e-05,
"loss": 0.3597,
"step": 211
},
{
"epoch": 0.23,
"grad_norm": 0.49836512850382286,
"learning_rate": 1.5611353711790394e-05,
"loss": 0.3596,
"step": 212
},
{
"epoch": 0.23,
"grad_norm": 0.4395052391516801,
"learning_rate": 1.5589519650655022e-05,
"loss": 0.352,
"step": 213
},
{
"epoch": 0.23,
"grad_norm": 0.45220282868066214,
"learning_rate": 1.5567685589519653e-05,
"loss": 0.3589,
"step": 214
},
{
"epoch": 0.23,
"grad_norm": 0.43696638330335513,
"learning_rate": 1.554585152838428e-05,
"loss": 0.352,
"step": 215
},
{
"epoch": 0.23,
"grad_norm": 0.4493622923687038,
"learning_rate": 1.552401746724891e-05,
"loss": 0.3452,
"step": 216
},
{
"epoch": 0.23,
"grad_norm": 0.4368637942785743,
"learning_rate": 1.5502183406113537e-05,
"loss": 0.3606,
"step": 217
},
{
"epoch": 0.24,
"grad_norm": 0.39254783399638093,
"learning_rate": 1.5480349344978168e-05,
"loss": 0.3446,
"step": 218
},
{
"epoch": 0.24,
"grad_norm": 0.39267930582092203,
"learning_rate": 1.5458515283842796e-05,
"loss": 0.3463,
"step": 219
},
{
"epoch": 0.24,
"grad_norm": 0.49336143547588307,
"learning_rate": 1.5436681222707427e-05,
"loss": 0.3545,
"step": 220
},
{
"epoch": 0.24,
"grad_norm": 0.3861913649178407,
"learning_rate": 1.5414847161572052e-05,
"loss": 0.3463,
"step": 221
},
{
"epoch": 0.24,
"grad_norm": 0.39706584967316105,
"learning_rate": 1.5393013100436683e-05,
"loss": 0.3492,
"step": 222
},
{
"epoch": 0.24,
"grad_norm": 0.5278921313657406,
"learning_rate": 1.537117903930131e-05,
"loss": 0.3629,
"step": 223
},
{
"epoch": 0.24,
"grad_norm": 0.3925508897738005,
"learning_rate": 1.5349344978165942e-05,
"loss": 0.3303,
"step": 224
},
{
"epoch": 0.24,
"grad_norm": 0.3833632691587687,
"learning_rate": 1.5327510917030567e-05,
"loss": 0.3542,
"step": 225
},
{
"epoch": 0.24,
"grad_norm": 0.4143435226325874,
"learning_rate": 1.5305676855895198e-05,
"loss": 0.3557,
"step": 226
},
{
"epoch": 0.25,
"grad_norm": 0.5459803483958254,
"learning_rate": 1.5283842794759826e-05,
"loss": 0.3431,
"step": 227
},
{
"epoch": 0.25,
"grad_norm": 0.4130510897175159,
"learning_rate": 1.5262008733624457e-05,
"loss": 0.3463,
"step": 228
},
{
"epoch": 0.25,
"grad_norm": 0.4749086480386068,
"learning_rate": 1.5240174672489083e-05,
"loss": 0.3596,
"step": 229
},
{
"epoch": 0.25,
"grad_norm": 0.4727236644494208,
"learning_rate": 1.5218340611353713e-05,
"loss": 0.3384,
"step": 230
},
{
"epoch": 0.25,
"grad_norm": 0.4264208163593211,
"learning_rate": 1.5196506550218343e-05,
"loss": 0.3633,
"step": 231
},
{
"epoch": 0.25,
"grad_norm": 0.45316939300908593,
"learning_rate": 1.517467248908297e-05,
"loss": 0.3532,
"step": 232
},
{
"epoch": 0.25,
"grad_norm": 0.5014932507396431,
"learning_rate": 1.51528384279476e-05,
"loss": 0.3581,
"step": 233
},
{
"epoch": 0.25,
"grad_norm": 0.3740888692728483,
"learning_rate": 1.5131004366812228e-05,
"loss": 0.3591,
"step": 234
},
{
"epoch": 0.25,
"grad_norm": 0.41532342481631906,
"learning_rate": 1.5109170305676858e-05,
"loss": 0.3459,
"step": 235
},
{
"epoch": 0.25,
"grad_norm": 0.3887632636355805,
"learning_rate": 1.5087336244541485e-05,
"loss": 0.3447,
"step": 236
},
{
"epoch": 0.26,
"grad_norm": 0.40230721242978856,
"learning_rate": 1.5065502183406115e-05,
"loss": 0.3672,
"step": 237
},
{
"epoch": 0.26,
"grad_norm": 0.39422442525497225,
"learning_rate": 1.5043668122270743e-05,
"loss": 0.35,
"step": 238
},
{
"epoch": 0.26,
"grad_norm": 0.42685260200105113,
"learning_rate": 1.5021834061135372e-05,
"loss": 0.3614,
"step": 239
},
{
"epoch": 0.26,
"grad_norm": 0.39862400252659125,
"learning_rate": 1.5000000000000002e-05,
"loss": 0.3485,
"step": 240
},
{
"epoch": 0.26,
"grad_norm": 0.4428714302171062,
"learning_rate": 1.497816593886463e-05,
"loss": 0.36,
"step": 241
},
{
"epoch": 0.26,
"grad_norm": 0.4185235272235272,
"learning_rate": 1.495633187772926e-05,
"loss": 0.3492,
"step": 242
},
{
"epoch": 0.26,
"grad_norm": 0.39847525249468174,
"learning_rate": 1.4934497816593887e-05,
"loss": 0.3567,
"step": 243
},
{
"epoch": 0.26,
"grad_norm": 0.3855301479432999,
"learning_rate": 1.4912663755458517e-05,
"loss": 0.3571,
"step": 244
},
{
"epoch": 0.26,
"grad_norm": 0.393234899196987,
"learning_rate": 1.4890829694323145e-05,
"loss": 0.3566,
"step": 245
},
{
"epoch": 0.27,
"grad_norm": 0.45253412592224085,
"learning_rate": 1.4868995633187775e-05,
"loss": 0.3527,
"step": 246
},
{
"epoch": 0.27,
"grad_norm": 0.4218417613746097,
"learning_rate": 1.4847161572052402e-05,
"loss": 0.3391,
"step": 247
},
{
"epoch": 0.27,
"grad_norm": 0.447603156651112,
"learning_rate": 1.4825327510917032e-05,
"loss": 0.3711,
"step": 248
},
{
"epoch": 0.27,
"grad_norm": 0.39097790988732256,
"learning_rate": 1.480349344978166e-05,
"loss": 0.3376,
"step": 249
},
{
"epoch": 0.27,
"grad_norm": 0.4318473286465606,
"learning_rate": 1.478165938864629e-05,
"loss": 0.3506,
"step": 250
},
{
"epoch": 0.27,
"grad_norm": 0.45407014143329794,
"learning_rate": 1.4759825327510919e-05,
"loss": 0.3658,
"step": 251
},
{
"epoch": 0.27,
"grad_norm": 0.4126763263167815,
"learning_rate": 1.4737991266375547e-05,
"loss": 0.3383,
"step": 252
},
{
"epoch": 0.27,
"grad_norm": 0.3962187981580303,
"learning_rate": 1.4716157205240177e-05,
"loss": 0.3449,
"step": 253
},
{
"epoch": 0.27,
"grad_norm": 0.44411484803724655,
"learning_rate": 1.4694323144104804e-05,
"loss": 0.3329,
"step": 254
},
{
"epoch": 0.28,
"grad_norm": 0.40937179753455943,
"learning_rate": 1.4672489082969434e-05,
"loss": 0.3473,
"step": 255
},
{
"epoch": 0.28,
"grad_norm": 0.4170433030971853,
"learning_rate": 1.4650655021834062e-05,
"loss": 0.3537,
"step": 256
},
{
"epoch": 0.28,
"grad_norm": 0.5088525538449871,
"learning_rate": 1.4628820960698691e-05,
"loss": 0.3464,
"step": 257
},
{
"epoch": 0.28,
"grad_norm": 0.42919291729255854,
"learning_rate": 1.460698689956332e-05,
"loss": 0.3598,
"step": 258
},
{
"epoch": 0.28,
"grad_norm": 0.3968573821655448,
"learning_rate": 1.4585152838427949e-05,
"loss": 0.3297,
"step": 259
},
{
"epoch": 0.28,
"grad_norm": 0.41865791119556794,
"learning_rate": 1.4563318777292577e-05,
"loss": 0.3592,
"step": 260
},
{
"epoch": 0.28,
"grad_norm": 0.42353970958609694,
"learning_rate": 1.4541484716157206e-05,
"loss": 0.3548,
"step": 261
},
{
"epoch": 0.28,
"grad_norm": 0.3742329381749764,
"learning_rate": 1.4519650655021836e-05,
"loss": 0.3218,
"step": 262
},
{
"epoch": 0.28,
"grad_norm": 0.4626643436578089,
"learning_rate": 1.4497816593886464e-05,
"loss": 0.3384,
"step": 263
},
{
"epoch": 0.29,
"grad_norm": 0.45856052063838026,
"learning_rate": 1.4475982532751093e-05,
"loss": 0.3615,
"step": 264
},
{
"epoch": 0.29,
"grad_norm": 0.41283923418455426,
"learning_rate": 1.4454148471615721e-05,
"loss": 0.3524,
"step": 265
},
{
"epoch": 0.29,
"grad_norm": 0.4074545937674161,
"learning_rate": 1.4432314410480351e-05,
"loss": 0.3409,
"step": 266
},
{
"epoch": 0.29,
"grad_norm": 0.41807996749714293,
"learning_rate": 1.4410480349344979e-05,
"loss": 0.3452,
"step": 267
},
{
"epoch": 0.29,
"grad_norm": 0.38052190844013845,
"learning_rate": 1.4388646288209608e-05,
"loss": 0.3434,
"step": 268
},
{
"epoch": 0.29,
"grad_norm": 0.44469063822750765,
"learning_rate": 1.4366812227074236e-05,
"loss": 0.35,
"step": 269
},
{
"epoch": 0.29,
"grad_norm": 0.3829776252082372,
"learning_rate": 1.4344978165938866e-05,
"loss": 0.3263,
"step": 270
},
{
"epoch": 0.29,
"grad_norm": 0.4056196273835837,
"learning_rate": 1.4323144104803495e-05,
"loss": 0.3539,
"step": 271
},
{
"epoch": 0.29,
"grad_norm": 0.40412139770148303,
"learning_rate": 1.4301310043668123e-05,
"loss": 0.3598,
"step": 272
},
{
"epoch": 0.29,
"grad_norm": 0.4202242804763555,
"learning_rate": 1.4279475982532753e-05,
"loss": 0.3337,
"step": 273
},
{
"epoch": 0.3,
"grad_norm": 0.4047872913704621,
"learning_rate": 1.4257641921397381e-05,
"loss": 0.3511,
"step": 274
},
{
"epoch": 0.3,
"grad_norm": 0.37472436629683387,
"learning_rate": 1.423580786026201e-05,
"loss": 0.3307,
"step": 275
},
{
"epoch": 0.3,
"grad_norm": 0.41684890179246353,
"learning_rate": 1.4213973799126638e-05,
"loss": 0.3379,
"step": 276
},
{
"epoch": 0.3,
"grad_norm": 0.4174816823127464,
"learning_rate": 1.4192139737991268e-05,
"loss": 0.3611,
"step": 277
},
{
"epoch": 0.3,
"grad_norm": 0.4487870671522349,
"learning_rate": 1.4170305676855896e-05,
"loss": 0.3392,
"step": 278
},
{
"epoch": 0.3,
"grad_norm": 0.41458956443794104,
"learning_rate": 1.4148471615720525e-05,
"loss": 0.3467,
"step": 279
},
{
"epoch": 0.3,
"grad_norm": 0.46610708626195474,
"learning_rate": 1.4126637554585155e-05,
"loss": 0.3353,
"step": 280
},
{
"epoch": 0.3,
"grad_norm": 0.4512003769892443,
"learning_rate": 1.4104803493449783e-05,
"loss": 0.3513,
"step": 281
},
{
"epoch": 0.3,
"grad_norm": 0.3858475360185647,
"learning_rate": 1.408296943231441e-05,
"loss": 0.3177,
"step": 282
},
{
"epoch": 0.31,
"grad_norm": 0.3971417244165327,
"learning_rate": 1.406113537117904e-05,
"loss": 0.3517,
"step": 283
},
{
"epoch": 0.31,
"grad_norm": 0.4318791589108847,
"learning_rate": 1.403930131004367e-05,
"loss": 0.3369,
"step": 284
},
{
"epoch": 0.31,
"grad_norm": 0.39746635489556087,
"learning_rate": 1.4017467248908298e-05,
"loss": 0.3212,
"step": 285
},
{
"epoch": 0.31,
"grad_norm": 0.43224599512736944,
"learning_rate": 1.3995633187772927e-05,
"loss": 0.3451,
"step": 286
},
{
"epoch": 0.31,
"grad_norm": 0.4169628481404007,
"learning_rate": 1.3973799126637555e-05,
"loss": 0.3429,
"step": 287
},
{
"epoch": 0.31,
"grad_norm": 0.38109401419780187,
"learning_rate": 1.3951965065502185e-05,
"loss": 0.3444,
"step": 288
},
{
"epoch": 0.31,
"grad_norm": 0.4174351360849819,
"learning_rate": 1.3930131004366814e-05,
"loss": 0.3395,
"step": 289
},
{
"epoch": 0.31,
"grad_norm": 0.4089628933545283,
"learning_rate": 1.3908296943231442e-05,
"loss": 0.3534,
"step": 290
},
{
"epoch": 0.31,
"grad_norm": 0.45790289224080694,
"learning_rate": 1.388646288209607e-05,
"loss": 0.3365,
"step": 291
},
{
"epoch": 0.32,
"grad_norm": 0.37659122705694703,
"learning_rate": 1.38646288209607e-05,
"loss": 0.3331,
"step": 292
},
{
"epoch": 0.32,
"grad_norm": 0.3837943816847714,
"learning_rate": 1.384279475982533e-05,
"loss": 0.3182,
"step": 293
},
{
"epoch": 0.32,
"grad_norm": 0.3822006474102691,
"learning_rate": 1.3820960698689957e-05,
"loss": 0.3074,
"step": 294
},
{
"epoch": 0.32,
"grad_norm": 0.3796325094330029,
"learning_rate": 1.3799126637554587e-05,
"loss": 0.311,
"step": 295
},
{
"epoch": 0.32,
"grad_norm": 0.3910995271433598,
"learning_rate": 1.3777292576419215e-05,
"loss": 0.3151,
"step": 296
},
{
"epoch": 0.32,
"grad_norm": 0.4162711670508305,
"learning_rate": 1.3755458515283844e-05,
"loss": 0.318,
"step": 297
},
{
"epoch": 0.32,
"grad_norm": 0.39243348147197954,
"learning_rate": 1.3733624454148474e-05,
"loss": 0.315,
"step": 298
},
{
"epoch": 0.32,
"grad_norm": 0.3718609158068742,
"learning_rate": 1.3711790393013102e-05,
"loss": 0.3068,
"step": 299
},
{
"epoch": 0.32,
"grad_norm": 0.4114227903281369,
"learning_rate": 1.368995633187773e-05,
"loss": 0.3086,
"step": 300
},
{
"epoch": 0.33,
"grad_norm": 0.37117152152007493,
"learning_rate": 1.366812227074236e-05,
"loss": 0.3082,
"step": 301
},
{
"epoch": 0.33,
"grad_norm": 0.37955301673885294,
"learning_rate": 1.3646288209606989e-05,
"loss": 0.3048,
"step": 302
},
{
"epoch": 0.33,
"grad_norm": 0.7145676209823392,
"learning_rate": 1.3624454148471617e-05,
"loss": 0.3151,
"step": 303
},
{
"epoch": 0.33,
"grad_norm": 0.4155052349515932,
"learning_rate": 1.3602620087336245e-05,
"loss": 0.322,
"step": 304
},
{
"epoch": 0.33,
"grad_norm": 0.41405357495171785,
"learning_rate": 1.3580786026200874e-05,
"loss": 0.3246,
"step": 305
},
{
"epoch": 0.33,
"grad_norm": 0.4272800105120246,
"learning_rate": 1.3558951965065504e-05,
"loss": 0.3218,
"step": 306
},
{
"epoch": 0.33,
"grad_norm": 0.39827328458103745,
"learning_rate": 1.3537117903930132e-05,
"loss": 0.3177,
"step": 307
},
{
"epoch": 0.33,
"grad_norm": 0.42606470314677275,
"learning_rate": 1.3515283842794761e-05,
"loss": 0.3158,
"step": 308
}
],
"logging_steps": 1,
"max_steps": 926,
"num_input_tokens_seen": 0,
"num_train_epochs": 9223372036854775807,
"save_steps": 154,
"total_flos": 329999752101888.0,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}
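
Below is a minimal, hedged sketch (not part of the original checkpoint file) of how this trainer_state.json can be loaded and its logged loss trajectory summarized with standard-library Python; the local filename "trainer_state.json" is an assumption.

# Usage sketch: parse the trainer state and report the logged loss range.
# Assumes the JSON above is saved locally as "trainer_state.json".
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry holds epoch, grad_norm, learning_rate, loss, step.
steps = [entry["step"] for entry in state["log_history"]]
losses = [entry["loss"] for entry in state["log_history"]]

print(f"logged steps: {steps[0]}..{steps[-1]} of {state['max_steps']} total")
print(f"loss: {losses[0]:.4f} at step {steps[0]} -> {losses[-1]:.4f} at step {steps[-1]}")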