subject3-test1 / trainer_state.json
lillian039 — Model save (commit 416c4ed, verified)
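The body below is the raw `trainer_state.json` written by the Hugging Face `Trainer`: run-level fields (`global_step`, `epoch`, `best_metric`) followed by `log_history`, a list of per-step training entries (`loss`, `learning_rate`, `grad_norm`, `step`) and per-epoch evaluation entries (`eval_loss`, `eval_runtime`, ...). As a minimal sketch, assuming the file has been downloaded locally as `trainer_state.json` (the filename and the printed summary are illustrative, not part of this repo), the logged curves can be read back with the standard library:

```python
import json

# Load the Trainer state and split log_history into training-step entries
# (which carry "loss") and evaluation entries (which carry "eval_loss").
with open("trainer_state.json") as f:
    state = json.load(f)

train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

steps = [e["step"] for e in train_logs]
losses = [e["loss"] for e in train_logs]
lrs = [e["learning_rate"] for e in train_logs]

print(f"{len(train_logs)} training entries, {len(eval_logs)} eval entries")
print(f"last logged step {steps[-1]}: loss={losses[-1]}, lr={lrs[-1]:.3e}")
```
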
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.0,
"eval_steps": 500,
"global_step": 754,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.002652519893899204,
"grad_norm": 972.6011082224239,
"learning_rate": 1.3157894736842107e-07,
"loss": 12.3485,
"step": 1
},
{
"epoch": 0.005305039787798408,
"grad_norm": 1176.0761331054841,
"learning_rate": 2.6315789473684213e-07,
"loss": 12.3023,
"step": 2
},
{
"epoch": 0.007957559681697613,
"grad_norm": 971.133776912219,
"learning_rate": 3.9473684210526315e-07,
"loss": 12.2062,
"step": 3
},
{
"epoch": 0.010610079575596816,
"grad_norm": 1161.3124856806703,
"learning_rate": 5.263157894736843e-07,
"loss": 12.2629,
"step": 4
},
{
"epoch": 0.013262599469496022,
"grad_norm": 1332.84779500764,
"learning_rate": 6.578947368421053e-07,
"loss": 12.3509,
"step": 5
},
{
"epoch": 0.015915119363395226,
"grad_norm": 850.1180640186841,
"learning_rate": 7.894736842105263e-07,
"loss": 12.1591,
"step": 6
},
{
"epoch": 0.01856763925729443,
"grad_norm": 1123.22480648108,
"learning_rate": 9.210526315789474e-07,
"loss": 11.8747,
"step": 7
},
{
"epoch": 0.021220159151193633,
"grad_norm": 953.0673098616203,
"learning_rate": 1.0526315789473685e-06,
"loss": 11.6101,
"step": 8
},
{
"epoch": 0.023872679045092837,
"grad_norm": 1117.0646961596742,
"learning_rate": 1.1842105263157894e-06,
"loss": 11.4673,
"step": 9
},
{
"epoch": 0.026525198938992044,
"grad_norm": 1083.1435085272988,
"learning_rate": 1.3157894736842106e-06,
"loss": 10.2665,
"step": 10
},
{
"epoch": 0.029177718832891247,
"grad_norm": 925.8133851516698,
"learning_rate": 1.4473684210526317e-06,
"loss": 9.8587,
"step": 11
},
{
"epoch": 0.03183023872679045,
"grad_norm": 835.5900642092195,
"learning_rate": 1.5789473684210526e-06,
"loss": 9.5232,
"step": 12
},
{
"epoch": 0.034482758620689655,
"grad_norm": 737.5585272473943,
"learning_rate": 1.710526315789474e-06,
"loss": 8.6792,
"step": 13
},
{
"epoch": 0.03713527851458886,
"grad_norm": 973.6275891557045,
"learning_rate": 1.8421052631578948e-06,
"loss": 8.5067,
"step": 14
},
{
"epoch": 0.03978779840848806,
"grad_norm": 987.1822647467712,
"learning_rate": 1.973684210526316e-06,
"loss": 8.1794,
"step": 15
},
{
"epoch": 0.042440318302387266,
"grad_norm": 825.59398545438,
"learning_rate": 2.105263157894737e-06,
"loss": 7.7437,
"step": 16
},
{
"epoch": 0.04509283819628647,
"grad_norm": 664.0819933865747,
"learning_rate": 2.236842105263158e-06,
"loss": 6.4606,
"step": 17
},
{
"epoch": 0.04774535809018567,
"grad_norm": 399.57434205536475,
"learning_rate": 2.368421052631579e-06,
"loss": 5.6519,
"step": 18
},
{
"epoch": 0.050397877984084884,
"grad_norm": 504.5286526909389,
"learning_rate": 2.5e-06,
"loss": 5.3274,
"step": 19
},
{
"epoch": 0.05305039787798409,
"grad_norm": 388.29875810234256,
"learning_rate": 2.631578947368421e-06,
"loss": 5.0673,
"step": 20
},
{
"epoch": 0.05570291777188329,
"grad_norm": 454.4658090119901,
"learning_rate": 2.7631578947368424e-06,
"loss": 4.7588,
"step": 21
},
{
"epoch": 0.058355437665782495,
"grad_norm": 366.33318334352083,
"learning_rate": 2.8947368421052634e-06,
"loss": 4.5846,
"step": 22
},
{
"epoch": 0.0610079575596817,
"grad_norm": 328.1174135802827,
"learning_rate": 3.0263157894736843e-06,
"loss": 4.4205,
"step": 23
},
{
"epoch": 0.0636604774535809,
"grad_norm": 466.9071960005728,
"learning_rate": 3.157894736842105e-06,
"loss": 4.2348,
"step": 24
},
{
"epoch": 0.06631299734748011,
"grad_norm": 331.2124348618216,
"learning_rate": 3.289473684210527e-06,
"loss": 3.9631,
"step": 25
},
{
"epoch": 0.06896551724137931,
"grad_norm": 348.3850354268525,
"learning_rate": 3.421052631578948e-06,
"loss": 3.6177,
"step": 26
},
{
"epoch": 0.07161803713527852,
"grad_norm": 332.5916510702998,
"learning_rate": 3.5526315789473687e-06,
"loss": 3.5028,
"step": 27
},
{
"epoch": 0.07427055702917772,
"grad_norm": 456.9904876458593,
"learning_rate": 3.6842105263157896e-06,
"loss": 3.6164,
"step": 28
},
{
"epoch": 0.07692307692307693,
"grad_norm": 436.5446671330148,
"learning_rate": 3.815789473684211e-06,
"loss": 3.3163,
"step": 29
},
{
"epoch": 0.07957559681697612,
"grad_norm": 266.0326848946069,
"learning_rate": 3.947368421052632e-06,
"loss": 3.4249,
"step": 30
},
{
"epoch": 0.08222811671087533,
"grad_norm": 264.7876966039979,
"learning_rate": 4.078947368421053e-06,
"loss": 3.3034,
"step": 31
},
{
"epoch": 0.08488063660477453,
"grad_norm": 368.7949684184987,
"learning_rate": 4.210526315789474e-06,
"loss": 3.1039,
"step": 32
},
{
"epoch": 0.08753315649867374,
"grad_norm": 255.2314821527665,
"learning_rate": 4.342105263157895e-06,
"loss": 3.0517,
"step": 33
},
{
"epoch": 0.09018567639257294,
"grad_norm": 234.26369572444867,
"learning_rate": 4.473684210526316e-06,
"loss": 2.9529,
"step": 34
},
{
"epoch": 0.09283819628647215,
"grad_norm": 230.93299067113927,
"learning_rate": 4.605263157894737e-06,
"loss": 2.8815,
"step": 35
},
{
"epoch": 0.09549071618037135,
"grad_norm": 232.19712182995283,
"learning_rate": 4.736842105263158e-06,
"loss": 2.9011,
"step": 36
},
{
"epoch": 0.09814323607427056,
"grad_norm": 233.9410464893109,
"learning_rate": 4.8684210526315795e-06,
"loss": 2.7754,
"step": 37
},
{
"epoch": 0.10079575596816977,
"grad_norm": 240.55698339891316,
"learning_rate": 5e-06,
"loss": 2.7326,
"step": 38
},
{
"epoch": 0.10344827586206896,
"grad_norm": 204.71398942027966,
"learning_rate": 5.131578947368422e-06,
"loss": 2.7594,
"step": 39
},
{
"epoch": 0.10610079575596817,
"grad_norm": 191.82223594470645,
"learning_rate": 5.263157894736842e-06,
"loss": 2.665,
"step": 40
},
{
"epoch": 0.10875331564986737,
"grad_norm": 391.21195275764177,
"learning_rate": 5.394736842105264e-06,
"loss": 2.842,
"step": 41
},
{
"epoch": 0.11140583554376658,
"grad_norm": 199.44545824533185,
"learning_rate": 5.526315789473685e-06,
"loss": 2.7292,
"step": 42
},
{
"epoch": 0.11405835543766578,
"grad_norm": 214.39560903670093,
"learning_rate": 5.657894736842106e-06,
"loss": 2.7937,
"step": 43
},
{
"epoch": 0.11671087533156499,
"grad_norm": 135.53162028919942,
"learning_rate": 5.789473684210527e-06,
"loss": 2.6044,
"step": 44
},
{
"epoch": 0.11936339522546419,
"grad_norm": 184.52491951315736,
"learning_rate": 5.921052631578948e-06,
"loss": 2.6461,
"step": 45
},
{
"epoch": 0.1220159151193634,
"grad_norm": 164.26425091990012,
"learning_rate": 6.0526315789473685e-06,
"loss": 2.6699,
"step": 46
},
{
"epoch": 0.1246684350132626,
"grad_norm": 156.31534034010923,
"learning_rate": 6.18421052631579e-06,
"loss": 2.6341,
"step": 47
},
{
"epoch": 0.1273209549071618,
"grad_norm": 133.568318629801,
"learning_rate": 6.31578947368421e-06,
"loss": 2.5945,
"step": 48
},
{
"epoch": 0.129973474801061,
"grad_norm": 129.47868792668547,
"learning_rate": 6.447368421052632e-06,
"loss": 2.4362,
"step": 49
},
{
"epoch": 0.13262599469496023,
"grad_norm": 148.49435032805644,
"learning_rate": 6.578947368421054e-06,
"loss": 2.6795,
"step": 50
},
{
"epoch": 0.13527851458885942,
"grad_norm": 140.9084313756403,
"learning_rate": 6.710526315789474e-06,
"loss": 2.5829,
"step": 51
},
{
"epoch": 0.13793103448275862,
"grad_norm": 152.83640328644313,
"learning_rate": 6.842105263157896e-06,
"loss": 2.9126,
"step": 52
},
{
"epoch": 0.14058355437665782,
"grad_norm": 123.55761772751443,
"learning_rate": 6.973684210526316e-06,
"loss": 2.6503,
"step": 53
},
{
"epoch": 0.14323607427055704,
"grad_norm": 100.61578026023533,
"learning_rate": 7.1052631578947375e-06,
"loss": 2.3635,
"step": 54
},
{
"epoch": 0.14588859416445624,
"grad_norm": 159.03053787861532,
"learning_rate": 7.236842105263158e-06,
"loss": 3.1021,
"step": 55
},
{
"epoch": 0.14854111405835543,
"grad_norm": 123.64692196540877,
"learning_rate": 7.368421052631579e-06,
"loss": 2.8577,
"step": 56
},
{
"epoch": 0.15119363395225463,
"grad_norm": 87.70627460451284,
"learning_rate": 7.500000000000001e-06,
"loss": 2.3376,
"step": 57
},
{
"epoch": 0.15384615384615385,
"grad_norm": 86.58878221665253,
"learning_rate": 7.631578947368423e-06,
"loss": 2.4375,
"step": 58
},
{
"epoch": 0.15649867374005305,
"grad_norm": 134.90794157537533,
"learning_rate": 7.763157894736843e-06,
"loss": 2.5644,
"step": 59
},
{
"epoch": 0.15915119363395225,
"grad_norm": 115.31216244306997,
"learning_rate": 7.894736842105265e-06,
"loss": 2.4072,
"step": 60
},
{
"epoch": 0.16180371352785147,
"grad_norm": 120.1225075682861,
"learning_rate": 8.026315789473685e-06,
"loss": 2.5956,
"step": 61
},
{
"epoch": 0.16445623342175067,
"grad_norm": 136.94352276466043,
"learning_rate": 8.157894736842106e-06,
"loss": 2.4038,
"step": 62
},
{
"epoch": 0.16710875331564987,
"grad_norm": 111.78482749569982,
"learning_rate": 8.289473684210526e-06,
"loss": 2.3572,
"step": 63
},
{
"epoch": 0.16976127320954906,
"grad_norm": 100.8891133060052,
"learning_rate": 8.421052631578948e-06,
"loss": 2.257,
"step": 64
},
{
"epoch": 0.1724137931034483,
"grad_norm": 114.86478282978065,
"learning_rate": 8.552631578947368e-06,
"loss": 2.2236,
"step": 65
},
{
"epoch": 0.17506631299734748,
"grad_norm": 114.08371988056022,
"learning_rate": 8.68421052631579e-06,
"loss": 2.228,
"step": 66
},
{
"epoch": 0.17771883289124668,
"grad_norm": 113.07954964295763,
"learning_rate": 8.81578947368421e-06,
"loss": 2.2422,
"step": 67
},
{
"epoch": 0.18037135278514588,
"grad_norm": 109.31976151906129,
"learning_rate": 8.947368421052632e-06,
"loss": 2.1732,
"step": 68
},
{
"epoch": 0.1830238726790451,
"grad_norm": 90.99647003375713,
"learning_rate": 9.078947368421054e-06,
"loss": 2.1485,
"step": 69
},
{
"epoch": 0.1856763925729443,
"grad_norm": 120.51965849864614,
"learning_rate": 9.210526315789474e-06,
"loss": 2.2091,
"step": 70
},
{
"epoch": 0.1883289124668435,
"grad_norm": 96.16044170894298,
"learning_rate": 9.342105263157895e-06,
"loss": 2.0965,
"step": 71
},
{
"epoch": 0.1909814323607427,
"grad_norm": 113.04131258428713,
"learning_rate": 9.473684210526315e-06,
"loss": 2.232,
"step": 72
},
{
"epoch": 0.19363395225464192,
"grad_norm": 93.19087300109942,
"learning_rate": 9.605263157894737e-06,
"loss": 2.062,
"step": 73
},
{
"epoch": 0.1962864721485411,
"grad_norm": 122.34858082117903,
"learning_rate": 9.736842105263159e-06,
"loss": 2.2507,
"step": 74
},
{
"epoch": 0.1989389920424403,
"grad_norm": 123.75061956942726,
"learning_rate": 9.868421052631579e-06,
"loss": 2.0923,
"step": 75
},
{
"epoch": 0.20159151193633953,
"grad_norm": 95.31909307451943,
"learning_rate": 1e-05,
"loss": 2.0863,
"step": 76
},
{
"epoch": 0.20424403183023873,
"grad_norm": 111.39378682665635,
"learning_rate": 9.999946324068588e-06,
"loss": 2.0406,
"step": 77
},
{
"epoch": 0.20689655172413793,
"grad_norm": 80.94452155733912,
"learning_rate": 9.999785297426788e-06,
"loss": 2.0968,
"step": 78
},
{
"epoch": 0.20954907161803712,
"grad_norm": 84.78034162492699,
"learning_rate": 9.999516923531906e-06,
"loss": 1.9962,
"step": 79
},
{
"epoch": 0.21220159151193635,
"grad_norm": 78.02592069732914,
"learning_rate": 9.999141208146029e-06,
"loss": 1.9925,
"step": 80
},
{
"epoch": 0.21485411140583555,
"grad_norm": 73.73682362098175,
"learning_rate": 9.998658159335903e-06,
"loss": 1.9603,
"step": 81
},
{
"epoch": 0.21750663129973474,
"grad_norm": 90.67851878871835,
"learning_rate": 9.998067787472772e-06,
"loss": 1.8926,
"step": 82
},
{
"epoch": 0.22015915119363394,
"grad_norm": 62.82693612482471,
"learning_rate": 9.997370105232134e-06,
"loss": 1.8182,
"step": 83
},
{
"epoch": 0.22281167108753316,
"grad_norm": 75.05067588018298,
"learning_rate": 9.99656512759349e-06,
"loss": 1.8357,
"step": 84
},
{
"epoch": 0.22546419098143236,
"grad_norm": 49.91545803670374,
"learning_rate": 9.995652871840006e-06,
"loss": 1.7348,
"step": 85
},
{
"epoch": 0.22811671087533156,
"grad_norm": 91.12448174019961,
"learning_rate": 9.994633357558158e-06,
"loss": 1.8954,
"step": 86
},
{
"epoch": 0.23076923076923078,
"grad_norm": 52.94023906195075,
"learning_rate": 9.993506606637297e-06,
"loss": 1.8498,
"step": 87
},
{
"epoch": 0.23342175066312998,
"grad_norm": 58.195205266701194,
"learning_rate": 9.992272643269181e-06,
"loss": 1.7826,
"step": 88
},
{
"epoch": 0.23607427055702918,
"grad_norm": 66.84628315063024,
"learning_rate": 9.990931493947467e-06,
"loss": 1.7434,
"step": 89
},
{
"epoch": 0.23872679045092837,
"grad_norm": 73.69628201213949,
"learning_rate": 9.989483187467128e-06,
"loss": 1.8642,
"step": 90
},
{
"epoch": 0.2413793103448276,
"grad_norm": 52.58610935369465,
"learning_rate": 9.987927754923844e-06,
"loss": 1.836,
"step": 91
},
{
"epoch": 0.2440318302387268,
"grad_norm": 78.35414171000565,
"learning_rate": 9.986265229713332e-06,
"loss": 1.932,
"step": 92
},
{
"epoch": 0.246684350132626,
"grad_norm": 49.2141069227244,
"learning_rate": 9.98449564753063e-06,
"loss": 1.7129,
"step": 93
},
{
"epoch": 0.2493368700265252,
"grad_norm": 61.31351433697711,
"learning_rate": 9.982619046369321e-06,
"loss": 1.7202,
"step": 94
},
{
"epoch": 0.2519893899204244,
"grad_norm": 68.28056374656848,
"learning_rate": 9.980635466520738e-06,
"loss": 1.6906,
"step": 95
},
{
"epoch": 0.2546419098143236,
"grad_norm": 46.951538432249045,
"learning_rate": 9.978544950573075e-06,
"loss": 1.632,
"step": 96
},
{
"epoch": 0.2572944297082228,
"grad_norm": 67.90324049138877,
"learning_rate": 9.976347543410487e-06,
"loss": 1.656,
"step": 97
},
{
"epoch": 0.259946949602122,
"grad_norm": 64.68477738459366,
"learning_rate": 9.974043292212129e-06,
"loss": 1.4681,
"step": 98
},
{
"epoch": 0.2625994694960212,
"grad_norm": 52.8724055352585,
"learning_rate": 9.97163224645113e-06,
"loss": 1.5811,
"step": 99
},
{
"epoch": 0.26525198938992045,
"grad_norm": 35.35995771432847,
"learning_rate": 9.96911445789354e-06,
"loss": 1.3361,
"step": 100
},
{
"epoch": 0.26790450928381965,
"grad_norm": 44.61978123978171,
"learning_rate": 9.966489980597217e-06,
"loss": 1.5174,
"step": 101
},
{
"epoch": 0.27055702917771884,
"grad_norm": 57.19406544164446,
"learning_rate": 9.963758870910672e-06,
"loss": 1.4109,
"step": 102
},
{
"epoch": 0.27320954907161804,
"grad_norm": 50.526490813756546,
"learning_rate": 9.960921187471841e-06,
"loss": 1.3191,
"step": 103
},
{
"epoch": 0.27586206896551724,
"grad_norm": 101.13725470627442,
"learning_rate": 9.957976991206847e-06,
"loss": 1.6531,
"step": 104
},
{
"epoch": 0.27851458885941643,
"grad_norm": 52.15413810029031,
"learning_rate": 9.95492634532868e-06,
"loss": 1.4429,
"step": 105
},
{
"epoch": 0.28116710875331563,
"grad_norm": 41.87999077534381,
"learning_rate": 9.951769315335843e-06,
"loss": 1.238,
"step": 106
},
{
"epoch": 0.2838196286472148,
"grad_norm": 37.40295368354187,
"learning_rate": 9.94850596901095e-06,
"loss": 1.1688,
"step": 107
},
{
"epoch": 0.2864721485411141,
"grad_norm": 68.04586288775573,
"learning_rate": 9.94513637641926e-06,
"loss": 1.461,
"step": 108
},
{
"epoch": 0.2891246684350133,
"grad_norm": 44.169402573464126,
"learning_rate": 9.94166060990718e-06,
"loss": 1.1791,
"step": 109
},
{
"epoch": 0.2917771883289125,
"grad_norm": 47.80762405569716,
"learning_rate": 9.938078744100713e-06,
"loss": 1.1185,
"step": 110
},
{
"epoch": 0.29442970822281167,
"grad_norm": 58.966613634353685,
"learning_rate": 9.934390855903852e-06,
"loss": 1.3944,
"step": 111
},
{
"epoch": 0.29708222811671087,
"grad_norm": 47.03397627383742,
"learning_rate": 9.930597024496933e-06,
"loss": 1.23,
"step": 112
},
{
"epoch": 0.29973474801061006,
"grad_norm": 40.39452844321203,
"learning_rate": 9.926697331334924e-06,
"loss": 1.0874,
"step": 113
},
{
"epoch": 0.30238726790450926,
"grad_norm": 40.58586118259995,
"learning_rate": 9.922691860145696e-06,
"loss": 1.1465,
"step": 114
},
{
"epoch": 0.3050397877984085,
"grad_norm": 81.40197786842415,
"learning_rate": 9.918580696928206e-06,
"loss": 1.5197,
"step": 115
},
{
"epoch": 0.3076923076923077,
"grad_norm": 42.37482487824635,
"learning_rate": 9.91436392995066e-06,
"loss": 1.1805,
"step": 116
},
{
"epoch": 0.3103448275862069,
"grad_norm": 31.02071243624101,
"learning_rate": 9.910041649748613e-06,
"loss": 0.9539,
"step": 117
},
{
"epoch": 0.3129973474801061,
"grad_norm": 41.401274375888676,
"learning_rate": 9.905613949123036e-06,
"loss": 0.9762,
"step": 118
},
{
"epoch": 0.3156498673740053,
"grad_norm": 49.185437588379855,
"learning_rate": 9.901080923138308e-06,
"loss": 1.0671,
"step": 119
},
{
"epoch": 0.3183023872679045,
"grad_norm": 25.940285977239768,
"learning_rate": 9.896442669120188e-06,
"loss": 0.8426,
"step": 120
},
{
"epoch": 0.3209549071618037,
"grad_norm": 46.84002530549447,
"learning_rate": 9.891699286653714e-06,
"loss": 1.0983,
"step": 121
},
{
"epoch": 0.32360742705570295,
"grad_norm": 40.94275204536911,
"learning_rate": 9.886850877581079e-06,
"loss": 1.049,
"step": 122
},
{
"epoch": 0.32625994694960214,
"grad_norm": 28.321978107011155,
"learning_rate": 9.88189754599943e-06,
"loss": 0.8165,
"step": 123
},
{
"epoch": 0.32891246684350134,
"grad_norm": 30.5824466235061,
"learning_rate": 9.87683939825864e-06,
"loss": 0.7374,
"step": 124
},
{
"epoch": 0.33156498673740054,
"grad_norm": 30.73141505007011,
"learning_rate": 9.87167654295903e-06,
"loss": 0.7421,
"step": 125
},
{
"epoch": 0.33421750663129973,
"grad_norm": 38.284482417984805,
"learning_rate": 9.866409090949023e-06,
"loss": 1.0553,
"step": 126
},
{
"epoch": 0.33687002652519893,
"grad_norm": 51.217111013485166,
"learning_rate": 9.861037155322777e-06,
"loss": 0.907,
"step": 127
},
{
"epoch": 0.3395225464190981,
"grad_norm": 35.667190844261036,
"learning_rate": 9.855560851417752e-06,
"loss": 0.7419,
"step": 128
},
{
"epoch": 0.3421750663129973,
"grad_norm": 47.52472262750116,
"learning_rate": 9.849980296812231e-06,
"loss": 0.9425,
"step": 129
},
{
"epoch": 0.3448275862068966,
"grad_norm": 48.31204711246393,
"learning_rate": 9.844295611322804e-06,
"loss": 0.8446,
"step": 130
},
{
"epoch": 0.34748010610079577,
"grad_norm": 30.190906581474252,
"learning_rate": 9.838506917001784e-06,
"loss": 0.6881,
"step": 131
},
{
"epoch": 0.35013262599469497,
"grad_norm": 46.794947303199244,
"learning_rate": 9.832614338134595e-06,
"loss": 0.6701,
"step": 132
},
{
"epoch": 0.35278514588859416,
"grad_norm": 44.10169615851438,
"learning_rate": 9.826618001237101e-06,
"loss": 0.701,
"step": 133
},
{
"epoch": 0.35543766578249336,
"grad_norm": 40.06922950860214,
"learning_rate": 9.82051803505289e-06,
"loss": 0.7066,
"step": 134
},
{
"epoch": 0.35809018567639256,
"grad_norm": 51.10173470889434,
"learning_rate": 9.814314570550506e-06,
"loss": 0.7107,
"step": 135
},
{
"epoch": 0.36074270557029176,
"grad_norm": 45.42561699905777,
"learning_rate": 9.808007740920647e-06,
"loss": 0.7882,
"step": 136
},
{
"epoch": 0.363395225464191,
"grad_norm": 30.22023691550999,
"learning_rate": 9.80159768157329e-06,
"loss": 0.5774,
"step": 137
},
{
"epoch": 0.3660477453580902,
"grad_norm": 37.33347097999451,
"learning_rate": 9.795084530134801e-06,
"loss": 0.6183,
"step": 138
},
{
"epoch": 0.3687002652519894,
"grad_norm": 31.72442511930728,
"learning_rate": 9.788468426444968e-06,
"loss": 0.799,
"step": 139
},
{
"epoch": 0.3713527851458886,
"grad_norm": 44.85515304779163,
"learning_rate": 9.781749512554e-06,
"loss": 0.6424,
"step": 140
},
{
"epoch": 0.3740053050397878,
"grad_norm": 39.0917840315655,
"learning_rate": 9.774927932719484e-06,
"loss": 0.6998,
"step": 141
},
{
"epoch": 0.376657824933687,
"grad_norm": 31.573363442317945,
"learning_rate": 9.768003833403278e-06,
"loss": 0.4961,
"step": 142
},
{
"epoch": 0.3793103448275862,
"grad_norm": 61.98565644042616,
"learning_rate": 9.760977363268374e-06,
"loss": 1.0158,
"step": 143
},
{
"epoch": 0.3819628647214854,
"grad_norm": 39.75425925803264,
"learning_rate": 9.753848673175707e-06,
"loss": 0.6281,
"step": 144
},
{
"epoch": 0.38461538461538464,
"grad_norm": 48.98336149254754,
"learning_rate": 9.746617916180906e-06,
"loss": 0.6136,
"step": 145
},
{
"epoch": 0.38726790450928383,
"grad_norm": 57.044641212074524,
"learning_rate": 9.739285247531019e-06,
"loss": 0.7083,
"step": 146
},
{
"epoch": 0.38992042440318303,
"grad_norm": 63.365556425165224,
"learning_rate": 9.731850824661171e-06,
"loss": 0.5737,
"step": 147
},
{
"epoch": 0.3925729442970822,
"grad_norm": 29.826608394688588,
"learning_rate": 9.724314807191197e-06,
"loss": 0.4653,
"step": 148
},
{
"epoch": 0.3952254641909814,
"grad_norm": 47.58176548578861,
"learning_rate": 9.716677356922193e-06,
"loss": 0.5239,
"step": 149
},
{
"epoch": 0.3978779840848806,
"grad_norm": 34.51576173886699,
"learning_rate": 9.708938637833065e-06,
"loss": 0.5865,
"step": 150
},
{
"epoch": 0.4005305039787798,
"grad_norm": 34.740787732790245,
"learning_rate": 9.701098816076995e-06,
"loss": 0.5071,
"step": 151
},
{
"epoch": 0.40318302387267907,
"grad_norm": 35.31607232875537,
"learning_rate": 9.693158059977879e-06,
"loss": 0.494,
"step": 152
},
{
"epoch": 0.40583554376657827,
"grad_norm": 51.83713992645971,
"learning_rate": 9.685116540026703e-06,
"loss": 0.6192,
"step": 153
},
{
"epoch": 0.40848806366047746,
"grad_norm": 37.44373095646274,
"learning_rate": 9.6769744288779e-06,
"loss": 0.5033,
"step": 154
},
{
"epoch": 0.41114058355437666,
"grad_norm": 109.25763221989088,
"learning_rate": 9.668731901345632e-06,
"loss": 0.8446,
"step": 155
},
{
"epoch": 0.41379310344827586,
"grad_norm": 72.46155458193397,
"learning_rate": 9.660389134400034e-06,
"loss": 0.7201,
"step": 156
},
{
"epoch": 0.41644562334217505,
"grad_norm": 92.07617516331763,
"learning_rate": 9.651946307163417e-06,
"loss": 0.9007,
"step": 157
},
{
"epoch": 0.41909814323607425,
"grad_norm": 28.589438406273647,
"learning_rate": 9.643403600906433e-06,
"loss": 0.3304,
"step": 158
},
{
"epoch": 0.4217506631299735,
"grad_norm": 33.709892226026334,
"learning_rate": 9.634761199044165e-06,
"loss": 0.5636,
"step": 159
},
{
"epoch": 0.4244031830238727,
"grad_norm": 158.4075248636022,
"learning_rate": 9.626019287132202e-06,
"loss": 0.4064,
"step": 160
},
{
"epoch": 0.4270557029177719,
"grad_norm": 38.574563627234824,
"learning_rate": 9.617178052862648e-06,
"loss": 0.4748,
"step": 161
},
{
"epoch": 0.4297082228116711,
"grad_norm": 45.60608001495772,
"learning_rate": 9.608237686060099e-06,
"loss": 0.6974,
"step": 162
},
{
"epoch": 0.4323607427055703,
"grad_norm": 47.48506948979431,
"learning_rate": 9.599198378677559e-06,
"loss": 0.4992,
"step": 163
},
{
"epoch": 0.4350132625994695,
"grad_norm": 31.370496495096628,
"learning_rate": 9.590060324792328e-06,
"loss": 0.4248,
"step": 164
},
{
"epoch": 0.4376657824933687,
"grad_norm": 30.604914876341343,
"learning_rate": 9.580823720601824e-06,
"loss": 0.3956,
"step": 165
},
{
"epoch": 0.4403183023872679,
"grad_norm": 35.21040129146704,
"learning_rate": 9.571488764419381e-06,
"loss": 0.4175,
"step": 166
},
{
"epoch": 0.44297082228116713,
"grad_norm": 35.4113416422159,
"learning_rate": 9.562055656669988e-06,
"loss": 0.5203,
"step": 167
},
{
"epoch": 0.44562334217506633,
"grad_norm": 39.91487718635988,
"learning_rate": 9.552524599885982e-06,
"loss": 0.378,
"step": 168
},
{
"epoch": 0.4482758620689655,
"grad_norm": 39.85559208094948,
"learning_rate": 9.542895798702702e-06,
"loss": 0.6038,
"step": 169
},
{
"epoch": 0.4509283819628647,
"grad_norm": 25.908557322404143,
"learning_rate": 9.5331694598541e-06,
"loss": 0.3786,
"step": 170
},
{
"epoch": 0.4535809018567639,
"grad_norm": 48.47896763638714,
"learning_rate": 9.52334579216829e-06,
"loss": 0.5274,
"step": 171
},
{
"epoch": 0.4562334217506631,
"grad_norm": 36.56071047636118,
"learning_rate": 9.51342500656308e-06,
"loss": 0.4753,
"step": 172
},
{
"epoch": 0.4588859416445623,
"grad_norm": 40.92239723888123,
"learning_rate": 9.503407316041432e-06,
"loss": 0.4009,
"step": 173
},
{
"epoch": 0.46153846153846156,
"grad_norm": 24.061372567374256,
"learning_rate": 9.493292935686896e-06,
"loss": 0.3719,
"step": 174
},
{
"epoch": 0.46419098143236076,
"grad_norm": 32.5043140267615,
"learning_rate": 9.483082082658984e-06,
"loss": 0.348,
"step": 175
},
{
"epoch": 0.46684350132625996,
"grad_norm": 33.49855253180825,
"learning_rate": 9.472774976188515e-06,
"loss": 0.2976,
"step": 176
},
{
"epoch": 0.46949602122015915,
"grad_norm": 43.029659310923336,
"learning_rate": 9.462371837572907e-06,
"loss": 0.3728,
"step": 177
},
{
"epoch": 0.47214854111405835,
"grad_norm": 31.765940790477142,
"learning_rate": 9.451872890171419e-06,
"loss": 0.2834,
"step": 178
},
{
"epoch": 0.47480106100795755,
"grad_norm": 46.81294121573895,
"learning_rate": 9.441278359400366e-06,
"loss": 0.3757,
"step": 179
},
{
"epoch": 0.47745358090185674,
"grad_norm": 46.581740603921325,
"learning_rate": 9.430588472728271e-06,
"loss": 0.3292,
"step": 180
},
{
"epoch": 0.48010610079575594,
"grad_norm": 33.689497051794476,
"learning_rate": 9.41980345967098e-06,
"loss": 0.3834,
"step": 181
},
{
"epoch": 0.4827586206896552,
"grad_norm": 61.128747699132035,
"learning_rate": 9.408923551786742e-06,
"loss": 0.3354,
"step": 182
},
{
"epoch": 0.4854111405835544,
"grad_norm": 22.95346596441586,
"learning_rate": 9.397948982671237e-06,
"loss": 0.3053,
"step": 183
},
{
"epoch": 0.4880636604774536,
"grad_norm": 30.898534305601594,
"learning_rate": 9.386879987952549e-06,
"loss": 0.2642,
"step": 184
},
{
"epoch": 0.4907161803713528,
"grad_norm": 33.57899929124302,
"learning_rate": 9.375716805286122e-06,
"loss": 0.3217,
"step": 185
},
{
"epoch": 0.493368700265252,
"grad_norm": 62.90298927426579,
"learning_rate": 9.364459674349642e-06,
"loss": 0.3244,
"step": 186
},
{
"epoch": 0.4960212201591512,
"grad_norm": 43.421745744085165,
"learning_rate": 9.353108836837907e-06,
"loss": 0.4662,
"step": 187
},
{
"epoch": 0.4986737400530504,
"grad_norm": 84.88782280618557,
"learning_rate": 9.341664536457626e-06,
"loss": 0.4652,
"step": 188
},
{
"epoch": 0.5013262599469496,
"grad_norm": 41.48296017032388,
"learning_rate": 9.330127018922195e-06,
"loss": 0.4524,
"step": 189
},
{
"epoch": 0.5039787798408488,
"grad_norm": 54.307781009627426,
"learning_rate": 9.318496531946411e-06,
"loss": 0.4255,
"step": 190
},
{
"epoch": 0.506631299734748,
"grad_norm": 28.526745313141138,
"learning_rate": 9.306773325241161e-06,
"loss": 0.3938,
"step": 191
},
{
"epoch": 0.5092838196286472,
"grad_norm": 32.22292357278733,
"learning_rate": 9.294957650508065e-06,
"loss": 0.4339,
"step": 192
},
{
"epoch": 0.5119363395225465,
"grad_norm": 71.01658099069965,
"learning_rate": 9.283049761434059e-06,
"loss": 0.3714,
"step": 193
},
{
"epoch": 0.5145888594164456,
"grad_norm": 74.60447764546221,
"learning_rate": 9.27104991368596e-06,
"loss": 0.4595,
"step": 194
},
{
"epoch": 0.5172413793103449,
"grad_norm": 34.72647596903285,
"learning_rate": 9.258958364904966e-06,
"loss": 0.4545,
"step": 195
},
{
"epoch": 0.519893899204244,
"grad_norm": 26.331798545183236,
"learning_rate": 9.246775374701139e-06,
"loss": 0.3962,
"step": 196
},
{
"epoch": 0.5225464190981433,
"grad_norm": 24.700967086911067,
"learning_rate": 9.234501204647814e-06,
"loss": 0.3429,
"step": 197
},
{
"epoch": 0.5251989389920424,
"grad_norm": 54.171499277303575,
"learning_rate": 9.222136118275996e-06,
"loss": 0.4216,
"step": 198
},
{
"epoch": 0.5278514588859416,
"grad_norm": 27.43993544126369,
"learning_rate": 9.209680381068698e-06,
"loss": 0.6582,
"step": 199
},
{
"epoch": 0.5305039787798409,
"grad_norm": 22.598607279543927,
"learning_rate": 9.197134260455233e-06,
"loss": 0.4346,
"step": 200
},
{
"epoch": 0.53315649867374,
"grad_norm": 25.120430306898687,
"learning_rate": 9.184498025805493e-06,
"loss": 0.3572,
"step": 201
},
{
"epoch": 0.5358090185676393,
"grad_norm": 30.90653283841454,
"learning_rate": 9.171771948424138e-06,
"loss": 0.5035,
"step": 202
},
{
"epoch": 0.5384615384615384,
"grad_norm": 38.745294217181815,
"learning_rate": 9.158956301544791e-06,
"loss": 0.6001,
"step": 203
},
{
"epoch": 0.5411140583554377,
"grad_norm": 12.99132628044725,
"learning_rate": 9.146051360324166e-06,
"loss": 0.2217,
"step": 204
},
{
"epoch": 0.5437665782493368,
"grad_norm": 21.483605650056905,
"learning_rate": 9.13305740183616e-06,
"loss": 0.3407,
"step": 205
},
{
"epoch": 0.5464190981432361,
"grad_norm": 22.04202864170304,
"learning_rate": 9.119974705065902e-06,
"loss": 0.3155,
"step": 206
},
{
"epoch": 0.5490716180371353,
"grad_norm": 34.047701260544905,
"learning_rate": 9.106803550903765e-06,
"loss": 0.2755,
"step": 207
},
{
"epoch": 0.5517241379310345,
"grad_norm": 30.616049044407195,
"learning_rate": 9.093544222139338e-06,
"loss": 0.3207,
"step": 208
},
{
"epoch": 0.5543766578249337,
"grad_norm": 19.414012419163278,
"learning_rate": 9.080197003455347e-06,
"loss": 0.2854,
"step": 209
},
{
"epoch": 0.5570291777188329,
"grad_norm": 34.50661859771314,
"learning_rate": 9.066762181421552e-06,
"loss": 0.2571,
"step": 210
},
{
"epoch": 0.5596816976127321,
"grad_norm": 24.18802559390643,
"learning_rate": 9.053240044488587e-06,
"loss": 0.3284,
"step": 211
},
{
"epoch": 0.5623342175066313,
"grad_norm": 26.231041064560618,
"learning_rate": 9.039630882981769e-06,
"loss": 0.2911,
"step": 212
},
{
"epoch": 0.5649867374005305,
"grad_norm": 18.744950063838594,
"learning_rate": 9.025934989094866e-06,
"loss": 0.2078,
"step": 213
},
{
"epoch": 0.5676392572944297,
"grad_norm": 32.40690745848201,
"learning_rate": 9.012152656883824e-06,
"loss": 0.2154,
"step": 214
},
{
"epoch": 0.5702917771883289,
"grad_norm": 19.852273416933915,
"learning_rate": 8.998284182260448e-06,
"loss": 0.1813,
"step": 215
},
{
"epoch": 0.5729442970822282,
"grad_norm": 51.45945703972048,
"learning_rate": 8.984329862986056e-06,
"loss": 0.2579,
"step": 216
},
{
"epoch": 0.5755968169761273,
"grad_norm": 26.7842923687961,
"learning_rate": 8.970289998665083e-06,
"loss": 0.326,
"step": 217
},
{
"epoch": 0.5782493368700266,
"grad_norm": 25.798048935320374,
"learning_rate": 8.956164890738643e-06,
"loss": 0.2557,
"step": 218
},
{
"epoch": 0.5809018567639257,
"grad_norm": 31.147062160430725,
"learning_rate": 8.941954842478071e-06,
"loss": 0.3109,
"step": 219
},
{
"epoch": 0.583554376657825,
"grad_norm": 33.73392858792033,
"learning_rate": 8.927660158978392e-06,
"loss": 0.192,
"step": 220
},
{
"epoch": 0.5862068965517241,
"grad_norm": 52.12002946089713,
"learning_rate": 8.913281147151793e-06,
"loss": 0.3124,
"step": 221
},
{
"epoch": 0.5888594164456233,
"grad_norm": 31.949599003240756,
"learning_rate": 8.898818115721009e-06,
"loss": 0.3063,
"step": 222
},
{
"epoch": 0.5915119363395226,
"grad_norm": 46.81555399953881,
"learning_rate": 8.884271375212714e-06,
"loss": 0.3242,
"step": 223
},
{
"epoch": 0.5941644562334217,
"grad_norm": 35.262038994934144,
"learning_rate": 8.86964123795085e-06,
"loss": 0.2459,
"step": 224
},
{
"epoch": 0.596816976127321,
"grad_norm": 37.79974742141044,
"learning_rate": 8.85492801804991e-06,
"loss": 0.2955,
"step": 225
},
{
"epoch": 0.5994694960212201,
"grad_norm": 54.98818702954406,
"learning_rate": 8.84013203140821e-06,
"loss": 0.367,
"step": 226
},
{
"epoch": 0.6021220159151194,
"grad_norm": 40.40698875247452,
"learning_rate": 8.825253595701097e-06,
"loss": 0.2885,
"step": 227
},
{
"epoch": 0.6047745358090185,
"grad_norm": 52.621249803374496,
"learning_rate": 8.810293030374126e-06,
"loss": 0.2208,
"step": 228
},
{
"epoch": 0.6074270557029178,
"grad_norm": 63.43714192736091,
"learning_rate": 8.795250656636207e-06,
"loss": 0.4095,
"step": 229
},
{
"epoch": 0.610079575596817,
"grad_norm": 45.5316777547875,
"learning_rate": 8.780126797452713e-06,
"loss": 0.3347,
"step": 230
},
{
"epoch": 0.6127320954907162,
"grad_norm": 51.68778010061592,
"learning_rate": 8.764921777538533e-06,
"loss": 0.5593,
"step": 231
},
{
"epoch": 0.6153846153846154,
"grad_norm": 187.21679917554656,
"learning_rate": 8.749635923351108e-06,
"loss": 0.2935,
"step": 232
},
{
"epoch": 0.6180371352785146,
"grad_norm": 34.59940203028243,
"learning_rate": 8.734269563083424e-06,
"loss": 0.2359,
"step": 233
},
{
"epoch": 0.6206896551724138,
"grad_norm": 38.825893758239005,
"learning_rate": 8.71882302665696e-06,
"loss": 0.1639,
"step": 234
},
{
"epoch": 0.623342175066313,
"grad_norm": 51.493519726499024,
"learning_rate": 8.70329664571461e-06,
"loss": 0.375,
"step": 235
},
{
"epoch": 0.6259946949602122,
"grad_norm": 47.202719237816765,
"learning_rate": 8.687690753613554e-06,
"loss": 0.2737,
"step": 236
},
{
"epoch": 0.6286472148541115,
"grad_norm": 47.06710432775105,
"learning_rate": 8.672005685418115e-06,
"loss": 0.329,
"step": 237
},
{
"epoch": 0.6312997347480106,
"grad_norm": 36.186199865765076,
"learning_rate": 8.656241777892544e-06,
"loss": 0.1865,
"step": 238
},
{
"epoch": 0.6339522546419099,
"grad_norm": 82.68094414992525,
"learning_rate": 8.640399369493813e-06,
"loss": 0.2824,
"step": 239
},
{
"epoch": 0.636604774535809,
"grad_norm": 37.23457078121522,
"learning_rate": 8.624478800364332e-06,
"loss": 0.2146,
"step": 240
},
{
"epoch": 0.6392572944297082,
"grad_norm": 49.777881838826744,
"learning_rate": 8.608480412324652e-06,
"loss": 0.444,
"step": 241
},
{
"epoch": 0.6419098143236074,
"grad_norm": 48.32237627009601,
"learning_rate": 8.592404548866123e-06,
"loss": 0.2079,
"step": 242
},
{
"epoch": 0.6445623342175066,
"grad_norm": 37.924814101167854,
"learning_rate": 8.576251555143524e-06,
"loss": 0.1501,
"step": 243
},
{
"epoch": 0.6472148541114059,
"grad_norm": 35.79857920052079,
"learning_rate": 8.56002177796765e-06,
"loss": 0.131,
"step": 244
},
{
"epoch": 0.649867374005305,
"grad_norm": 62.52788609216863,
"learning_rate": 8.543715565797861e-06,
"loss": 0.4995,
"step": 245
},
{
"epoch": 0.6525198938992043,
"grad_norm": 53.50944198275586,
"learning_rate": 8.527333268734607e-06,
"loss": 0.3744,
"step": 246
},
{
"epoch": 0.6551724137931034,
"grad_norm": 47.32416611175991,
"learning_rate": 8.510875238511911e-06,
"loss": 0.152,
"step": 247
},
{
"epoch": 0.6578249336870027,
"grad_norm": 56.236921384204315,
"learning_rate": 8.494341828489812e-06,
"loss": 0.3054,
"step": 248
},
{
"epoch": 0.6604774535809018,
"grad_norm": 57.63962006677401,
"learning_rate": 8.477733393646787e-06,
"loss": 0.2399,
"step": 249
},
{
"epoch": 0.6631299734748011,
"grad_norm": 42.888481005602536,
"learning_rate": 8.461050290572114e-06,
"loss": 0.2081,
"step": 250
},
{
"epoch": 0.6657824933687002,
"grad_norm": 20.99857739960341,
"learning_rate": 8.444292877458238e-06,
"loss": 0.1291,
"step": 251
},
{
"epoch": 0.6684350132625995,
"grad_norm": 61.31650373045394,
"learning_rate": 8.427461514093056e-06,
"loss": 0.3973,
"step": 252
},
{
"epoch": 0.6710875331564987,
"grad_norm": 62.39650389044618,
"learning_rate": 8.410556561852212e-06,
"loss": 0.3041,
"step": 253
},
{
"epoch": 0.6737400530503979,
"grad_norm": 24.6664128060848,
"learning_rate": 8.39357838369133e-06,
"loss": 0.1809,
"step": 254
},
{
"epoch": 0.6763925729442971,
"grad_norm": 67.09141289555055,
"learning_rate": 8.376527344138222e-06,
"loss": 0.1814,
"step": 255
},
{
"epoch": 0.6790450928381963,
"grad_norm": 42.36398464526459,
"learning_rate": 8.359403809285054e-06,
"loss": 0.1685,
"step": 256
},
{
"epoch": 0.6816976127320955,
"grad_norm": 36.868570222869444,
"learning_rate": 8.342208146780504e-06,
"loss": 0.2083,
"step": 257
},
{
"epoch": 0.6843501326259946,
"grad_norm": 187.91006984894173,
"learning_rate": 8.324940725821853e-06,
"loss": 0.2198,
"step": 258
},
{
"epoch": 0.6870026525198939,
"grad_norm": 94.1194320864834,
"learning_rate": 8.30760191714706e-06,
"loss": 0.1839,
"step": 259
},
{
"epoch": 0.6896551724137931,
"grad_norm": 90.43733640770276,
"learning_rate": 8.290192093026805e-06,
"loss": 0.2862,
"step": 260
},
{
"epoch": 0.6923076923076923,
"grad_norm": 33.625780709523845,
"learning_rate": 8.272711627256501e-06,
"loss": 0.1615,
"step": 261
},
{
"epoch": 0.6949602122015915,
"grad_norm": 36.64648281855228,
"learning_rate": 8.255160895148263e-06,
"loss": 0.2367,
"step": 262
},
{
"epoch": 0.6976127320954907,
"grad_norm": 50.56944146133072,
"learning_rate": 8.237540273522844e-06,
"loss": 0.1622,
"step": 263
},
{
"epoch": 0.7002652519893899,
"grad_norm": 182.53585445600774,
"learning_rate": 8.219850140701557e-06,
"loss": 0.1438,
"step": 264
},
{
"epoch": 0.7029177718832891,
"grad_norm": 69.8729850923303,
"learning_rate": 8.202090876498144e-06,
"loss": 0.2296,
"step": 265
},
{
"epoch": 0.7055702917771883,
"grad_norm": 31.529852166286762,
"learning_rate": 8.184262862210624e-06,
"loss": 0.1816,
"step": 266
},
{
"epoch": 0.7082228116710876,
"grad_norm": 29.112616136097394,
"learning_rate": 8.166366480613107e-06,
"loss": 0.1042,
"step": 267
},
{
"epoch": 0.7108753315649867,
"grad_norm": 21.42596725620029,
"learning_rate": 8.14840211594757e-06,
"loss": 0.1284,
"step": 268
},
{
"epoch": 0.713527851458886,
"grad_norm": 43.121642674406175,
"learning_rate": 8.13037015391562e-06,
"loss": 0.2312,
"step": 269
},
{
"epoch": 0.7161803713527851,
"grad_norm": 45.89885836530061,
"learning_rate": 8.112270981670196e-06,
"loss": 0.157,
"step": 270
},
{
"epoch": 0.7188328912466844,
"grad_norm": 46.12217694940257,
"learning_rate": 8.09410498780727e-06,
"loss": 0.1332,
"step": 271
},
{
"epoch": 0.7214854111405835,
"grad_norm": 37.25833887900994,
"learning_rate": 8.075872562357502e-06,
"loss": 0.129,
"step": 272
},
{
"epoch": 0.7241379310344828,
"grad_norm": 33.182430046980066,
"learning_rate": 8.057574096777854e-06,
"loss": 0.157,
"step": 273
},
{
"epoch": 0.726790450928382,
"grad_norm": 25.452121277422545,
"learning_rate": 8.039209983943201e-06,
"loss": 0.0775,
"step": 274
},
{
"epoch": 0.7294429708222812,
"grad_norm": 141.08365924116302,
"learning_rate": 8.020780618137889e-06,
"loss": 0.3046,
"step": 275
},
{
"epoch": 0.7320954907161804,
"grad_norm": 32.48697951888386,
"learning_rate": 8.002286395047267e-06,
"loss": 0.0674,
"step": 276
},
{
"epoch": 0.7347480106100795,
"grad_norm": 133.0319126654085,
"learning_rate": 7.983727711749194e-06,
"loss": 0.2422,
"step": 277
},
{
"epoch": 0.7374005305039788,
"grad_norm": 61.60012063683119,
"learning_rate": 7.965104966705518e-06,
"loss": 0.1673,
"step": 278
},
{
"epoch": 0.7400530503978779,
"grad_norm": 60.7112974795619,
"learning_rate": 7.946418559753509e-06,
"loss": 0.2377,
"step": 279
},
{
"epoch": 0.7427055702917772,
"grad_norm": 49.872485049267205,
"learning_rate": 7.927668892097288e-06,
"loss": 0.1291,
"step": 280
},
{
"epoch": 0.7453580901856764,
"grad_norm": 84.27985493725895,
"learning_rate": 7.908856366299206e-06,
"loss": 0.2068,
"step": 281
},
{
"epoch": 0.7480106100795756,
"grad_norm": 89.07028975843645,
"learning_rate": 7.889981386271202e-06,
"loss": 0.3291,
"step": 282
},
{
"epoch": 0.7506631299734748,
"grad_norm": 33.01333445164221,
"learning_rate": 7.871044357266124e-06,
"loss": 0.1875,
"step": 283
},
{
"epoch": 0.753315649867374,
"grad_norm": 61.66655390306763,
"learning_rate": 7.852045685869046e-06,
"loss": 0.233,
"step": 284
},
{
"epoch": 0.7559681697612732,
"grad_norm": 45.392626938408974,
"learning_rate": 7.832985779988518e-06,
"loss": 0.0932,
"step": 285
},
{
"epoch": 0.7586206896551724,
"grad_norm": 62.82412358551919,
"learning_rate": 7.81386504884782e-06,
"loss": 0.2112,
"step": 286
},
{
"epoch": 0.7612732095490716,
"grad_norm": 77.77135839682533,
"learning_rate": 7.794683902976175e-06,
"loss": 0.1791,
"step": 287
},
{
"epoch": 0.7639257294429708,
"grad_norm": 50.290161550536745,
"learning_rate": 7.775442754199929e-06,
"loss": 0.1481,
"step": 288
},
{
"epoch": 0.76657824933687,
"grad_norm": 67.88391445622501,
"learning_rate": 7.75614201563372e-06,
"loss": 0.2346,
"step": 289
},
{
"epoch": 0.7692307692307693,
"grad_norm": 93.00871825203926,
"learning_rate": 7.736782101671587e-06,
"loss": 0.1737,
"step": 290
},
{
"epoch": 0.7718832891246684,
"grad_norm": 53.71872191270983,
"learning_rate": 7.717363427978103e-06,
"loss": 0.2112,
"step": 291
},
{
"epoch": 0.7745358090185677,
"grad_norm": 32.55623505216373,
"learning_rate": 7.697886411479422e-06,
"loss": 0.1045,
"step": 292
},
{
"epoch": 0.7771883289124668,
"grad_norm": 119.02364134570698,
"learning_rate": 7.67835147035435e-06,
"loss": 0.1283,
"step": 293
},
{
"epoch": 0.7798408488063661,
"grad_norm": 82.88296400008042,
"learning_rate": 7.658759024025349e-06,
"loss": 0.2001,
"step": 294
},
{
"epoch": 0.7824933687002652,
"grad_norm": 87.85759054670997,
"learning_rate": 7.639109493149537e-06,
"loss": 0.2218,
"step": 295
},
{
"epoch": 0.7851458885941645,
"grad_norm": 135.43767274810497,
"learning_rate": 7.6194032996096685e-06,
"loss": 0.1864,
"step": 296
},
{
"epoch": 0.7877984084880637,
"grad_norm": 35.39816057230434,
"learning_rate": 7.599640866505058e-06,
"loss": 0.1078,
"step": 297
},
{
"epoch": 0.7904509283819628,
"grad_norm": 86.97826726357368,
"learning_rate": 7.579822618142505e-06,
"loss": 0.1618,
"step": 298
},
{
"epoch": 0.7931034482758621,
"grad_norm": 63.73431809970673,
"learning_rate": 7.559948980027189e-06,
"loss": 0.2698,
"step": 299
},
{
"epoch": 0.7957559681697612,
"grad_norm": 60.183076177452044,
"learning_rate": 7.540020378853523e-06,
"loss": 0.1594,
"step": 300
},
{
"epoch": 0.7984084880636605,
"grad_norm": 52.5406608746334,
"learning_rate": 7.520037242496e-06,
"loss": 0.1836,
"step": 301
},
{
"epoch": 0.8010610079575596,
"grad_norm": 58.3353288564282,
"learning_rate": 7.500000000000001e-06,
"loss": 0.2385,
"step": 302
},
{
"epoch": 0.8037135278514589,
"grad_norm": 95.12475164244071,
"learning_rate": 7.479909081572587e-06,
"loss": 0.149,
"step": 303
},
{
"epoch": 0.8063660477453581,
"grad_norm": 46.159718954423134,
"learning_rate": 7.459764918573264e-06,
"loss": 0.1241,
"step": 304
},
{
"epoch": 0.8090185676392573,
"grad_norm": 585.8543926676177,
"learning_rate": 7.4395679435047175e-06,
"loss": 0.2069,
"step": 305
},
{
"epoch": 0.8116710875331565,
"grad_norm": 43.62270741644705,
"learning_rate": 7.419318590003524e-06,
"loss": 0.2192,
"step": 306
},
{
"epoch": 0.8143236074270557,
"grad_norm": 80.80866203584026,
"learning_rate": 7.399017292830848e-06,
"loss": 0.1693,
"step": 307
},
{
"epoch": 0.8169761273209549,
"grad_norm": 140.06215238615462,
"learning_rate": 7.3786644878631035e-06,
"loss": 0.1179,
"step": 308
},
{
"epoch": 0.8196286472148541,
"grad_norm": 78.61750140083358,
"learning_rate": 7.358260612082596e-06,
"loss": 0.2927,
"step": 309
},
{
"epoch": 0.8222811671087533,
"grad_norm": 70.13628004985249,
"learning_rate": 7.3378061035681415e-06,
"loss": 0.1519,
"step": 310
},
{
"epoch": 0.8249336870026526,
"grad_norm": 57.22683921472029,
"learning_rate": 7.317301401485657e-06,
"loss": 0.1996,
"step": 311
},
{
"epoch": 0.8275862068965517,
"grad_norm": 46.56616511235378,
"learning_rate": 7.296746946078737e-06,
"loss": 0.1493,
"step": 312
},
{
"epoch": 0.830238726790451,
"grad_norm": 52.16793854587364,
"learning_rate": 7.276143178659195e-06,
"loss": 0.1366,
"step": 313
},
{
"epoch": 0.8328912466843501,
"grad_norm": 56.1327455558407,
"learning_rate": 7.255490541597594e-06,
"loss": 0.1955,
"step": 314
},
{
"epoch": 0.8355437665782494,
"grad_norm": 64.04061033319763,
"learning_rate": 7.2347894783137485e-06,
"loss": 0.1223,
"step": 315
},
{
"epoch": 0.8381962864721485,
"grad_norm": 41.30534249766932,
"learning_rate": 7.2140404332671986e-06,
"loss": 0.1542,
"step": 316
},
{
"epoch": 0.8408488063660478,
"grad_norm": 59.39598856442543,
"learning_rate": 7.19324385194767e-06,
"loss": 0.2146,
"step": 317
},
{
"epoch": 0.843501326259947,
"grad_norm": 37.41911932589385,
"learning_rate": 7.172400180865514e-06,
"loss": 0.0893,
"step": 318
},
{
"epoch": 0.8461538461538461,
"grad_norm": 73.2170655681548,
"learning_rate": 7.1515098675421125e-06,
"loss": 0.1732,
"step": 319
},
{
"epoch": 0.8488063660477454,
"grad_norm": 54.90046866257738,
"learning_rate": 7.130573360500277e-06,
"loss": 0.1439,
"step": 320
},
{
"epoch": 0.8514588859416445,
"grad_norm": 57.04949794426898,
"learning_rate": 7.109591109254614e-06,
"loss": 0.2042,
"step": 321
},
{
"epoch": 0.8541114058355438,
"grad_norm": 63.62163449990602,
"learning_rate": 7.088563564301874e-06,
"loss": 0.3106,
"step": 322
},
{
"epoch": 0.8567639257294429,
"grad_norm": 47.16915214970692,
"learning_rate": 7.067491177111282e-06,
"loss": 0.2004,
"step": 323
},
{
"epoch": 0.8594164456233422,
"grad_norm": 47.16540297885484,
"learning_rate": 7.046374400114842e-06,
"loss": 0.1318,
"step": 324
},
{
"epoch": 0.8620689655172413,
"grad_norm": 31.645829514913345,
"learning_rate": 7.0252136866976205e-06,
"loss": 0.1516,
"step": 325
},
{
"epoch": 0.8647214854111406,
"grad_norm": 24.012429851214378,
"learning_rate": 7.004009491188023e-06,
"loss": 0.0749,
"step": 326
},
{
"epoch": 0.8673740053050398,
"grad_norm": 32.73778677034213,
"learning_rate": 6.982762268848024e-06,
"loss": 0.125,
"step": 327
},
{
"epoch": 0.870026525198939,
"grad_norm": 82.59316907110794,
"learning_rate": 6.961472475863406e-06,
"loss": 0.2883,
"step": 328
},
{
"epoch": 0.8726790450928382,
"grad_norm": 62.849464785868946,
"learning_rate": 6.940140569333953e-06,
"loss": 0.0979,
"step": 329
},
{
"epoch": 0.8753315649867374,
"grad_norm": 43.47928012947127,
"learning_rate": 6.918767007263646e-06,
"loss": 0.1342,
"step": 330
},
{
"epoch": 0.8779840848806366,
"grad_norm": 34.882979257908005,
"learning_rate": 6.897352248550828e-06,
"loss": 0.1035,
"step": 331
},
{
"epoch": 0.8806366047745358,
"grad_norm": 29.19456412156272,
"learning_rate": 6.875896752978345e-06,
"loss": 0.0811,
"step": 332
},
{
"epoch": 0.883289124668435,
"grad_norm": 35.59516597305405,
"learning_rate": 6.85440098120368e-06,
"loss": 0.0848,
"step": 333
},
{
"epoch": 0.8859416445623343,
"grad_norm": 36.16086076919905,
"learning_rate": 6.832865394749065e-06,
"loss": 0.091,
"step": 334
},
{
"epoch": 0.8885941644562334,
"grad_norm": 177.05756609050815,
"learning_rate": 6.811290455991561e-06,
"loss": 0.3371,
"step": 335
},
{
"epoch": 0.8912466843501327,
"grad_norm": 92.92983470592152,
"learning_rate": 6.7896766281531435e-06,
"loss": 0.2157,
"step": 336
},
{
"epoch": 0.8938992042440318,
"grad_norm": 63.197994960078006,
"learning_rate": 6.768024375290747e-06,
"loss": 0.1932,
"step": 337
},
{
"epoch": 0.896551724137931,
"grad_norm": 48.34862843223494,
"learning_rate": 6.7463341622863074e-06,
"loss": 0.0846,
"step": 338
},
{
"epoch": 0.8992042440318302,
"grad_norm": 55.66528827842737,
"learning_rate": 6.724606454836782e-06,
"loss": 0.0901,
"step": 339
},
{
"epoch": 0.9018567639257294,
"grad_norm": 57.60148358043267,
"learning_rate": 6.702841719444141e-06,
"loss": 0.1768,
"step": 340
},
{
"epoch": 0.9045092838196287,
"grad_norm": 74.31896937236691,
"learning_rate": 6.681040423405363e-06,
"loss": 0.1168,
"step": 341
},
{
"epoch": 0.9071618037135278,
"grad_norm": 47.82878168837927,
"learning_rate": 6.659203034802397e-06,
"loss": 0.1596,
"step": 342
},
{
"epoch": 0.9098143236074271,
"grad_norm": 93.24911135040223,
"learning_rate": 6.637330022492112e-06,
"loss": 0.1042,
"step": 343
},
{
"epoch": 0.9124668435013262,
"grad_norm": 35.793207624941154,
"learning_rate": 6.615421856096231e-06,
"loss": 0.0924,
"step": 344
},
{
"epoch": 0.9151193633952255,
"grad_norm": 29.532406452075197,
"learning_rate": 6.593479005991251e-06,
"loss": 0.0592,
"step": 345
},
{
"epoch": 0.9177718832891246,
"grad_norm": 94.46539610098456,
"learning_rate": 6.571501943298335e-06,
"loss": 0.1761,
"step": 346
},
{
"epoch": 0.9204244031830239,
"grad_norm": 90.85615975016468,
"learning_rate": 6.549491139873211e-06,
"loss": 0.2461,
"step": 347
},
{
"epoch": 0.9230769230769231,
"grad_norm": 61.149450630924925,
"learning_rate": 6.527447068296026e-06,
"loss": 0.132,
"step": 348
},
{
"epoch": 0.9257294429708223,
"grad_norm": 53.31973958738185,
"learning_rate": 6.50537020186121e-06,
"loss": 0.0903,
"step": 349
},
{
"epoch": 0.9283819628647215,
"grad_norm": 61.6331760199786,
"learning_rate": 6.483261014567311e-06,
"loss": 0.1165,
"step": 350
},
{
"epoch": 0.9310344827586207,
"grad_norm": 13.522720380850254,
"learning_rate": 6.4611199811068196e-06,
"loss": 0.0518,
"step": 351
},
{
"epoch": 0.9336870026525199,
"grad_norm": 56.84483914980899,
"learning_rate": 6.4389475768559675e-06,
"loss": 0.096,
"step": 352
},
{
"epoch": 0.9363395225464191,
"grad_norm": 43.113140594612666,
"learning_rate": 6.416744277864541e-06,
"loss": 0.0585,
"step": 353
},
{
"epoch": 0.9389920424403183,
"grad_norm": 30.744059362187798,
"learning_rate": 6.394510560845637e-06,
"loss": 0.0431,
"step": 354
},
{
"epoch": 0.9416445623342176,
"grad_norm": 89.21785052474625,
"learning_rate": 6.372246903165445e-06,
"loss": 0.1349,
"step": 355
},
{
"epoch": 0.9442970822281167,
"grad_norm": 36.9671448724562,
"learning_rate": 6.349953782832991e-06,
"loss": 0.0571,
"step": 356
},
{
"epoch": 0.946949602122016,
"grad_norm": 69.48840780248835,
"learning_rate": 6.327631678489874e-06,
"loss": 0.1355,
"step": 357
},
{
"epoch": 0.9496021220159151,
"grad_norm": 60.4321035877009,
"learning_rate": 6.305281069399989e-06,
"loss": 0.0963,
"step": 358
},
{
"epoch": 0.9522546419098143,
"grad_norm": 40.04735809339083,
"learning_rate": 6.282902435439242e-06,
"loss": 0.0928,
"step": 359
},
{
"epoch": 0.9549071618037135,
"grad_norm": 54.37683366544728,
"learning_rate": 6.26049625708524e-06,
"loss": 0.1326,
"step": 360
},
{
"epoch": 0.9575596816976127,
"grad_norm": 99.91926297776382,
"learning_rate": 6.238063015406982e-06,
"loss": 0.139,
"step": 361
},
{
"epoch": 0.9602122015915119,
"grad_norm": 101.75866107017366,
"learning_rate": 6.215603192054523e-06,
"loss": 0.1668,
"step": 362
},
{
"epoch": 0.9628647214854111,
"grad_norm": 98.85344488282533,
"learning_rate": 6.1931172692486405e-06,
"loss": 0.2849,
"step": 363
},
{
"epoch": 0.9655172413793104,
"grad_norm": 165.94536763450807,
"learning_rate": 6.17060572977047e-06,
"loss": 0.134,
"step": 364
},
{
"epoch": 0.9681697612732095,
"grad_norm": 54.285346936222666,
"learning_rate": 6.1480690569511545e-06,
"loss": 0.0964,
"step": 365
},
{
"epoch": 0.9708222811671088,
"grad_norm": 55.157535754858486,
"learning_rate": 6.125507734661458e-06,
"loss": 0.1046,
"step": 366
},
{
"epoch": 0.9734748010610079,
"grad_norm": 96.63952887374089,
"learning_rate": 6.1029222473013705e-06,
"loss": 0.159,
"step": 367
},
{
"epoch": 0.9761273209549072,
"grad_norm": 85.19730687411784,
"learning_rate": 6.080313079789723e-06,
"loss": 0.3654,
"step": 368
},
{
"epoch": 0.9787798408488063,
"grad_norm": 150.05384389958417,
"learning_rate": 6.0576807175537654e-06,
"loss": 0.3451,
"step": 369
},
{
"epoch": 0.9814323607427056,
"grad_norm": 58.19141403492245,
"learning_rate": 6.035025646518747e-06,
"loss": 0.2298,
"step": 370
},
{
"epoch": 0.9840848806366048,
"grad_norm": 736.1395772866435,
"learning_rate": 6.012348353097484e-06,
"loss": 0.1161,
"step": 371
},
{
"epoch": 0.986737400530504,
"grad_norm": 124.97267593433126,
"learning_rate": 5.9896493241799115e-06,
"loss": 0.1798,
"step": 372
},
{
"epoch": 0.9893899204244032,
"grad_norm": 39.63404839325758,
"learning_rate": 5.966929047122641e-06,
"loss": 0.0431,
"step": 373
},
{
"epoch": 0.9920424403183024,
"grad_norm": 42.1987745432697,
"learning_rate": 5.944188009738483e-06,
"loss": 0.131,
"step": 374
},
{
"epoch": 0.9946949602122016,
"grad_norm": 46.74565476536693,
"learning_rate": 5.921426700285986e-06,
"loss": 0.1121,
"step": 375
},
{
"epoch": 0.9973474801061007,
"grad_norm": 27.180360013119735,
"learning_rate": 5.898645607458941e-06,
"loss": 0.0807,
"step": 376
},
{
"epoch": 1.0,
"grad_norm": 52.54379466042234,
"learning_rate": 5.8758452203758995e-06,
"loss": 0.11,
"step": 377
},
{
"epoch": 1.0,
"eval_loss": 0.3463860750198364,
"eval_runtime": 175.4859,
"eval_samples_per_second": 12.052,
"eval_steps_per_second": 1.51,
"step": 377
},
{
"epoch": 1.0026525198938991,
"grad_norm": 17.68142839472272,
"learning_rate": 5.8530260285696674e-06,
"loss": 0.0529,
"step": 378
},
{
"epoch": 1.0053050397877985,
"grad_norm": 51.89104013831488,
"learning_rate": 5.830188521976794e-06,
"loss": 0.1815,
"step": 379
},
{
"epoch": 1.0079575596816976,
"grad_norm": 50.26750376877717,
"learning_rate": 5.807333190927054e-06,
"loss": 0.1903,
"step": 380
},
{
"epoch": 1.0106100795755968,
"grad_norm": 26.284561902513307,
"learning_rate": 5.784460526132918e-06,
"loss": 0.0937,
"step": 381
},
{
"epoch": 1.013262599469496,
"grad_norm": 29.45075692230731,
"learning_rate": 5.761571018679025e-06,
"loss": 0.0689,
"step": 382
},
{
"epoch": 1.0159151193633953,
"grad_norm": 44.374685492191965,
"learning_rate": 5.738665160011627e-06,
"loss": 0.1024,
"step": 383
},
{
"epoch": 1.0185676392572944,
"grad_norm": 110.37886147075102,
"learning_rate": 5.715743441928041e-06,
"loss": 0.1542,
"step": 384
},
{
"epoch": 1.0212201591511936,
"grad_norm": 55.13375919414892,
"learning_rate": 5.6928063565660955e-06,
"loss": 0.1021,
"step": 385
},
{
"epoch": 1.023872679045093,
"grad_norm": 61.58755412552258,
"learning_rate": 5.669854396393559e-06,
"loss": 0.1664,
"step": 386
},
{
"epoch": 1.026525198938992,
"grad_norm": 16.119940601375582,
"learning_rate": 5.646888054197568e-06,
"loss": 0.0401,
"step": 387
},
{
"epoch": 1.0291777188328912,
"grad_norm": 34.5316730073702,
"learning_rate": 5.623907823074044e-06,
"loss": 0.0592,
"step": 388
},
{
"epoch": 1.0318302387267904,
"grad_norm": 17.85636087705779,
"learning_rate": 5.600914196417112e-06,
"loss": 0.0356,
"step": 389
},
{
"epoch": 1.0344827586206897,
"grad_norm": 82.08938241465589,
"learning_rate": 5.577907667908505e-06,
"loss": 0.318,
"step": 390
},
{
"epoch": 1.0371352785145889,
"grad_norm": 10.735136547662583,
"learning_rate": 5.5548887315069575e-06,
"loss": 0.0349,
"step": 391
},
{
"epoch": 1.039787798408488,
"grad_norm": 29.23400812860403,
"learning_rate": 5.531857881437612e-06,
"loss": 0.163,
"step": 392
},
{
"epoch": 1.0424403183023874,
"grad_norm": 9.144587638643738,
"learning_rate": 5.508815612181401e-06,
"loss": 0.0328,
"step": 393
},
{
"epoch": 1.0450928381962865,
"grad_norm": 91.3897853620049,
"learning_rate": 5.48576241846443e-06,
"loss": 0.1606,
"step": 394
},
{
"epoch": 1.0477453580901857,
"grad_norm": 64.31305092536942,
"learning_rate": 5.462698795247357e-06,
"loss": 0.2104,
"step": 395
},
{
"epoch": 1.0503978779840848,
"grad_norm": 20.209916148778973,
"learning_rate": 5.4396252377147615e-06,
"loss": 0.0633,
"step": 396
},
{
"epoch": 1.0530503978779842,
"grad_norm": 46.54402940059323,
"learning_rate": 5.416542241264524e-06,
"loss": 0.1654,
"step": 397
},
{
"epoch": 1.0557029177718833,
"grad_norm": 33.01089496895264,
"learning_rate": 5.39345030149718e-06,
"loss": 0.0621,
"step": 398
},
{
"epoch": 1.0583554376657824,
"grad_norm": 39.99501520680759,
"learning_rate": 5.370349914205273e-06,
"loss": 0.0498,
"step": 399
},
{
"epoch": 1.0610079575596818,
"grad_norm": 18.118774769065993,
"learning_rate": 5.347241575362729e-06,
"loss": 0.0299,
"step": 400
},
{
"epoch": 1.063660477453581,
"grad_norm": 62.17306334991743,
"learning_rate": 5.324125781114193e-06,
"loss": 0.1847,
"step": 401
},
{
"epoch": 1.06631299734748,
"grad_norm": 56.97150682105584,
"learning_rate": 5.30100302776438e-06,
"loss": 0.1063,
"step": 402
},
{
"epoch": 1.0689655172413792,
"grad_norm": 6.0117370575361475,
"learning_rate": 5.277873811767415e-06,
"loss": 0.0236,
"step": 403
},
{
"epoch": 1.0716180371352786,
"grad_norm": 60.033520469100175,
"learning_rate": 5.254738629716186e-06,
"loss": 0.2285,
"step": 404
},
{
"epoch": 1.0742705570291777,
"grad_norm": 63.69961951769621,
"learning_rate": 5.231597978331669e-06,
"loss": 0.0603,
"step": 405
},
{
"epoch": 1.0769230769230769,
"grad_norm": 143.5016156103116,
"learning_rate": 5.208452354452275e-06,
"loss": 0.2138,
"step": 406
},
{
"epoch": 1.079575596816976,
"grad_norm": 59.54079006680123,
"learning_rate": 5.185302255023166e-06,
"loss": 0.1195,
"step": 407
},
{
"epoch": 1.0822281167108754,
"grad_norm": 74.24827747223898,
"learning_rate": 5.162148177085604e-06,
"loss": 0.0795,
"step": 408
},
{
"epoch": 1.0848806366047745,
"grad_norm": 35.752176310460186,
"learning_rate": 5.1389906177662705e-06,
"loss": 0.1055,
"step": 409
},
{
"epoch": 1.0875331564986737,
"grad_norm": 44.45779507253205,
"learning_rate": 5.115830074266592e-06,
"loss": 0.0765,
"step": 410
},
{
"epoch": 1.090185676392573,
"grad_norm": 18.742694564415604,
"learning_rate": 5.092667043852062e-06,
"loss": 0.0398,
"step": 411
},
{
"epoch": 1.0928381962864722,
"grad_norm": 57.0358614235308,
"learning_rate": 5.069502023841576e-06,
"loss": 0.0715,
"step": 412
},
{
"epoch": 1.0954907161803713,
"grad_norm": 60.239344515637356,
"learning_rate": 5.046335511596746e-06,
"loss": 0.0848,
"step": 413
},
{
"epoch": 1.0981432360742707,
"grad_norm": 16.893018773823467,
"learning_rate": 5.0231680045112174e-06,
"loss": 0.0262,
"step": 414
},
{
"epoch": 1.1007957559681698,
"grad_norm": 22.200535604754553,
"learning_rate": 5e-06,
"loss": 0.0323,
"step": 415
},
{
"epoch": 1.103448275862069,
"grad_norm": 12.087786574422244,
"learning_rate": 4.976831995488784e-06,
"loss": 0.0322,
"step": 416
},
{
"epoch": 1.106100795755968,
"grad_norm": 3.4135212180684533,
"learning_rate": 4.953664488403256e-06,
"loss": 0.0224,
"step": 417
},
{
"epoch": 1.1087533156498675,
"grad_norm": 73.5459113588346,
"learning_rate": 4.9304979761584256e-06,
"loss": 0.1572,
"step": 418
},
{
"epoch": 1.1114058355437666,
"grad_norm": 119.11937925866647,
"learning_rate": 4.90733295614794e-06,
"loss": 0.074,
"step": 419
},
{
"epoch": 1.1140583554376657,
"grad_norm": 17.00988830972538,
"learning_rate": 4.884169925733409e-06,
"loss": 0.0396,
"step": 420
},
{
"epoch": 1.1167108753315649,
"grad_norm": 63.38848105296801,
"learning_rate": 4.86100938223373e-06,
"loss": 0.2442,
"step": 421
},
{
"epoch": 1.1193633952254642,
"grad_norm": 1.858090026430698,
"learning_rate": 4.837851822914397e-06,
"loss": 0.0173,
"step": 422
},
{
"epoch": 1.1220159151193634,
"grad_norm": 30.20171759259156,
"learning_rate": 4.814697744976835e-06,
"loss": 0.0414,
"step": 423
},
{
"epoch": 1.1246684350132625,
"grad_norm": 77.7716831140678,
"learning_rate": 4.791547645547727e-06,
"loss": 0.1056,
"step": 424
},
{
"epoch": 1.1273209549071619,
"grad_norm": 99.332034153341,
"learning_rate": 4.768402021668332e-06,
"loss": 0.1221,
"step": 425
},
{
"epoch": 1.129973474801061,
"grad_norm": 121.44507801917504,
"learning_rate": 4.7452613702838166e-06,
"loss": 0.1468,
"step": 426
},
{
"epoch": 1.1326259946949602,
"grad_norm": 58.54743022339587,
"learning_rate": 4.722126188232586e-06,
"loss": 0.0665,
"step": 427
},
{
"epoch": 1.1352785145888595,
"grad_norm": 32.52455340574876,
"learning_rate": 4.698996972235622e-06,
"loss": 0.0568,
"step": 428
},
{
"epoch": 1.1379310344827587,
"grad_norm": 5.838433990840112,
"learning_rate": 4.6758742188858074e-06,
"loss": 0.0192,
"step": 429
},
{
"epoch": 1.1405835543766578,
"grad_norm": 71.81629603584015,
"learning_rate": 4.652758424637271e-06,
"loss": 0.1831,
"step": 430
},
{
"epoch": 1.143236074270557,
"grad_norm": 28.144271443634484,
"learning_rate": 4.629650085794728e-06,
"loss": 0.0433,
"step": 431
},
{
"epoch": 1.1458885941644563,
"grad_norm": 10.923481408273847,
"learning_rate": 4.606549698502824e-06,
"loss": 0.0204,
"step": 432
},
{
"epoch": 1.1485411140583555,
"grad_norm": 35.06820640518518,
"learning_rate": 4.583457758735477e-06,
"loss": 0.031,
"step": 433
},
{
"epoch": 1.1511936339522546,
"grad_norm": 55.77005306913307,
"learning_rate": 4.56037476228524e-06,
"loss": 0.0803,
"step": 434
},
{
"epoch": 1.1538461538461537,
"grad_norm": 75.59003795948266,
"learning_rate": 4.537301204752647e-06,
"loss": 0.0309,
"step": 435
},
{
"epoch": 1.156498673740053,
"grad_norm": 1.3813703392066852,
"learning_rate": 4.514237581535571e-06,
"loss": 0.0151,
"step": 436
},
{
"epoch": 1.1591511936339522,
"grad_norm": 15.654231932623063,
"learning_rate": 4.4911843878186e-06,
"loss": 0.0232,
"step": 437
},
{
"epoch": 1.1618037135278514,
"grad_norm": 1.5271112385991394,
"learning_rate": 4.468142118562389e-06,
"loss": 0.0142,
"step": 438
},
{
"epoch": 1.1644562334217508,
"grad_norm": 41.808618529434774,
"learning_rate": 4.4451112684930424e-06,
"loss": 0.0365,
"step": 439
},
{
"epoch": 1.16710875331565,
"grad_norm": 8.875571871982734,
"learning_rate": 4.422092332091497e-06,
"loss": 0.0174,
"step": 440
},
{
"epoch": 1.169761273209549,
"grad_norm": 42.48178681496721,
"learning_rate": 4.399085803582889e-06,
"loss": 0.0619,
"step": 441
},
{
"epoch": 1.1724137931034484,
"grad_norm": 60.49769628624578,
"learning_rate": 4.3760921769259585e-06,
"loss": 0.0729,
"step": 442
},
{
"epoch": 1.1750663129973475,
"grad_norm": 26.574412523619465,
"learning_rate": 4.353111945802433e-06,
"loss": 0.0338,
"step": 443
},
{
"epoch": 1.1777188328912467,
"grad_norm": 3.355645813229191,
"learning_rate": 4.3301456036064415e-06,
"loss": 0.0146,
"step": 444
},
{
"epoch": 1.1803713527851458,
"grad_norm": 3.0080563293578133,
"learning_rate": 4.307193643433907e-06,
"loss": 0.0146,
"step": 445
},
{
"epoch": 1.1830238726790452,
"grad_norm": 63.60306018611037,
"learning_rate": 4.28425655807196e-06,
"loss": 0.0911,
"step": 446
},
{
"epoch": 1.1856763925729443,
"grad_norm": 89.9524066237151,
"learning_rate": 4.261334839988375e-06,
"loss": 0.0838,
"step": 447
},
{
"epoch": 1.1883289124668435,
"grad_norm": 33.0673426347805,
"learning_rate": 4.2384289813209754e-06,
"loss": 0.0254,
"step": 448
},
{
"epoch": 1.1909814323607426,
"grad_norm": 47.18288151519677,
"learning_rate": 4.2155394738670814e-06,
"loss": 0.0601,
"step": 449
},
{
"epoch": 1.193633952254642,
"grad_norm": 123.89391604420489,
"learning_rate": 4.192666809072948e-06,
"loss": 0.1175,
"step": 450
},
{
"epoch": 1.1962864721485411,
"grad_norm": 1.0021352013229665,
"learning_rate": 4.1698114780232085e-06,
"loss": 0.0121,
"step": 451
},
{
"epoch": 1.1989389920424403,
"grad_norm": 10.214692929672639,
"learning_rate": 4.146973971430333e-06,
"loss": 0.0181,
"step": 452
},
{
"epoch": 1.2015915119363396,
"grad_norm": 15.04816767414729,
"learning_rate": 4.124154779624101e-06,
"loss": 0.018,
"step": 453
},
{
"epoch": 1.2042440318302388,
"grad_norm": 132.58538119418554,
"learning_rate": 4.101354392541061e-06,
"loss": 0.0335,
"step": 454
},
{
"epoch": 1.206896551724138,
"grad_norm": 420.64703357856126,
"learning_rate": 4.078573299714014e-06,
"loss": 0.0501,
"step": 455
},
{
"epoch": 1.209549071618037,
"grad_norm": 280.33971081551715,
"learning_rate": 4.055811990261518e-06,
"loss": 0.2018,
"step": 456
},
{
"epoch": 1.2122015915119364,
"grad_norm": 88.97956230266567,
"learning_rate": 4.033070952877362e-06,
"loss": 0.0857,
"step": 457
},
{
"epoch": 1.2148541114058355,
"grad_norm": 163.09979246949874,
"learning_rate": 4.010350675820091e-06,
"loss": 0.0972,
"step": 458
},
{
"epoch": 1.2175066312997347,
"grad_norm": 42.15910822649088,
"learning_rate": 3.987651646902518e-06,
"loss": 0.0287,
"step": 459
},
{
"epoch": 1.2201591511936338,
"grad_norm": 64.1429638803959,
"learning_rate": 3.964974353481254e-06,
"loss": 0.1458,
"step": 460
},
{
"epoch": 1.2228116710875332,
"grad_norm": 30.51792590892679,
"learning_rate": 3.942319282446236e-06,
"loss": 0.024,
"step": 461
},
{
"epoch": 1.2254641909814323,
"grad_norm": 181.99918301562113,
"learning_rate": 3.9196869202102775e-06,
"loss": 0.098,
"step": 462
},
{
"epoch": 1.2281167108753315,
"grad_norm": 195.65169748487375,
"learning_rate": 3.89707775269863e-06,
"loss": 0.3297,
"step": 463
},
{
"epoch": 1.2307692307692308,
"grad_norm": 145.26469256015454,
"learning_rate": 3.874492265338544e-06,
"loss": 0.3517,
"step": 464
},
{
"epoch": 1.23342175066313,
"grad_norm": 35.48516271276079,
"learning_rate": 3.851930943048845e-06,
"loss": 0.0478,
"step": 465
},
{
"epoch": 1.2360742705570291,
"grad_norm": 65.85506728640877,
"learning_rate": 3.829394270229531e-06,
"loss": 0.0828,
"step": 466
},
{
"epoch": 1.2387267904509285,
"grad_norm": 0.6539317465235754,
"learning_rate": 3.8068827307513624e-06,
"loss": 0.0108,
"step": 467
},
{
"epoch": 1.2413793103448276,
"grad_norm": 3.9741081031290233,
"learning_rate": 3.7843968079454773e-06,
"loss": 0.0114,
"step": 468
},
{
"epoch": 1.2440318302387268,
"grad_norm": 53.32466974048667,
"learning_rate": 3.7619369845930195e-06,
"loss": 0.0723,
"step": 469
},
{
"epoch": 1.246684350132626,
"grad_norm": 90.17775858739552,
"learning_rate": 3.7395037429147615e-06,
"loss": 0.1513,
"step": 470
},
{
"epoch": 1.2493368700265253,
"grad_norm": 160.86802203362117,
"learning_rate": 3.7170975645607587e-06,
"loss": 0.0339,
"step": 471
},
{
"epoch": 1.2519893899204244,
"grad_norm": 5.200593648400127,
"learning_rate": 3.694718930600012e-06,
"loss": 0.0125,
"step": 472
},
{
"epoch": 1.2546419098143236,
"grad_norm": 117.79945671801583,
"learning_rate": 3.672368321510128e-06,
"loss": 0.2488,
"step": 473
},
{
"epoch": 1.2572944297082227,
"grad_norm": 165.35263196181958,
"learning_rate": 3.6500462171670104e-06,
"loss": 0.1481,
"step": 474
},
{
"epoch": 1.259946949602122,
"grad_norm": 129.4190841061859,
"learning_rate": 3.6277530968345552e-06,
"loss": 0.0589,
"step": 475
},
{
"epoch": 1.2625994694960212,
"grad_norm": 55.628019444318824,
"learning_rate": 3.605489439154365e-06,
"loss": 0.0454,
"step": 476
},
{
"epoch": 1.2652519893899203,
"grad_norm": 93.59588719854446,
"learning_rate": 3.583255722135462e-06,
"loss": 0.1275,
"step": 477
},
{
"epoch": 1.2679045092838197,
"grad_norm": 10.07453203891493,
"learning_rate": 3.5610524231440324e-06,
"loss": 0.0153,
"step": 478
},
{
"epoch": 1.2705570291777188,
"grad_norm": 26.44208504298303,
"learning_rate": 3.5388800188931825e-06,
"loss": 0.034,
"step": 479
},
{
"epoch": 1.273209549071618,
"grad_norm": 43.17905002513331,
"learning_rate": 3.5167389854326907e-06,
"loss": 0.0589,
"step": 480
},
{
"epoch": 1.2758620689655173,
"grad_norm": 86.23570826911956,
"learning_rate": 3.4946297981387913e-06,
"loss": 0.0662,
"step": 481
},
{
"epoch": 1.2785145888594165,
"grad_norm": 15.699132266261188,
"learning_rate": 3.472552931703975e-06,
"loss": 0.0244,
"step": 482
},
{
"epoch": 1.2811671087533156,
"grad_norm": 83.37866584966203,
"learning_rate": 3.4505088601267913e-06,
"loss": 0.0561,
"step": 483
},
{
"epoch": 1.2838196286472148,
"grad_norm": 7.371340772061292,
"learning_rate": 3.428498056701665e-06,
"loss": 0.0157,
"step": 484
},
{
"epoch": 1.2864721485411141,
"grad_norm": 72.03330352703713,
"learning_rate": 3.4065209940087507e-06,
"loss": 0.0528,
"step": 485
},
{
"epoch": 1.2891246684350133,
"grad_norm": 60.07439706689129,
"learning_rate": 3.3845781439037695e-06,
"loss": 0.1746,
"step": 486
},
{
"epoch": 1.2917771883289124,
"grad_norm": 15.830602220363893,
"learning_rate": 3.3626699775078884e-06,
"loss": 0.0183,
"step": 487
},
{
"epoch": 1.2944297082228116,
"grad_norm": 90.63531502713072,
"learning_rate": 3.3407969651976045e-06,
"loss": 0.047,
"step": 488
},
{
"epoch": 1.297082228116711,
"grad_norm": 74.83398542874417,
"learning_rate": 3.3189595765946394e-06,
"loss": 0.1173,
"step": 489
},
{
"epoch": 1.29973474801061,
"grad_norm": 9.886317265655553,
"learning_rate": 3.2971582805558622e-06,
"loss": 0.0155,
"step": 490
},
{
"epoch": 1.3023872679045092,
"grad_norm": 113.17084370887686,
"learning_rate": 3.27539354516322e-06,
"loss": 0.0786,
"step": 491
},
{
"epoch": 1.3050397877984086,
"grad_norm": 42.35804473126241,
"learning_rate": 3.253665837713694e-06,
"loss": 0.0489,
"step": 492
},
{
"epoch": 1.3076923076923077,
"grad_norm": 32.04559934238327,
"learning_rate": 3.2319756247092552e-06,
"loss": 0.0737,
"step": 493
},
{
"epoch": 1.3103448275862069,
"grad_norm": 22.395640501451613,
"learning_rate": 3.2103233718468574e-06,
"loss": 0.0298,
"step": 494
},
{
"epoch": 1.3129973474801062,
"grad_norm": 95.51322529061028,
"learning_rate": 3.1887095440084402e-06,
"loss": 0.0758,
"step": 495
},
{
"epoch": 1.3156498673740054,
"grad_norm": 28.65751993200879,
"learning_rate": 3.167134605250938e-06,
"loss": 0.0442,
"step": 496
},
{
"epoch": 1.3183023872679045,
"grad_norm": 78.32058560501815,
"learning_rate": 3.14559901879632e-06,
"loss": 0.1396,
"step": 497
},
{
"epoch": 1.3209549071618036,
"grad_norm": 55.49433188217796,
"learning_rate": 3.1241032470216564e-06,
"loss": 0.1229,
"step": 498
},
{
"epoch": 1.323607427055703,
"grad_norm": 23.12635143909747,
"learning_rate": 3.102647751449174e-06,
"loss": 0.0258,
"step": 499
},
{
"epoch": 1.3262599469496021,
"grad_norm": 28.230637007866875,
"learning_rate": 3.081232992736355e-06,
"loss": 0.037,
"step": 500
},
{
"epoch": 1.3289124668435013,
"grad_norm": 7.715649744709754,
"learning_rate": 3.059859430666049e-06,
"loss": 0.0146,
"step": 501
},
{
"epoch": 1.3315649867374004,
"grad_norm": 25.891769502451563,
"learning_rate": 3.0385275241365965e-06,
"loss": 0.02,
"step": 502
},
{
"epoch": 1.3342175066312998,
"grad_norm": 62.79349231746409,
"learning_rate": 3.017237731151976e-06,
"loss": 0.1742,
"step": 503
},
{
"epoch": 1.336870026525199,
"grad_norm": 30.499199180675188,
"learning_rate": 2.9959905088119777e-06,
"loss": 0.0302,
"step": 504
},
{
"epoch": 1.339522546419098,
"grad_norm": 73.98055141461457,
"learning_rate": 2.9747863133023803e-06,
"loss": 0.0736,
"step": 505
},
{
"epoch": 1.3421750663129974,
"grad_norm": 39.29927119144451,
"learning_rate": 2.9536255998851615e-06,
"loss": 0.0237,
"step": 506
},
{
"epoch": 1.3448275862068966,
"grad_norm": 98.33679695718767,
"learning_rate": 2.93250882288872e-06,
"loss": 0.0387,
"step": 507
},
{
"epoch": 1.3474801061007957,
"grad_norm": 35.11947851166149,
"learning_rate": 2.9114364356981274e-06,
"loss": 0.1034,
"step": 508
},
{
"epoch": 1.350132625994695,
"grad_norm": 88.45903761328185,
"learning_rate": 2.8904088907453887e-06,
"loss": 0.1778,
"step": 509
},
{
"epoch": 1.3527851458885942,
"grad_norm": 8.532979695014493,
"learning_rate": 2.8694266394997238e-06,
"loss": 0.0149,
"step": 510
},
{
"epoch": 1.3554376657824934,
"grad_norm": 6.249816237214853,
"learning_rate": 2.8484901324578883e-06,
"loss": 0.0133,
"step": 511
},
{
"epoch": 1.3580901856763925,
"grad_norm": 22.97585624735204,
"learning_rate": 2.827599819134489e-06,
"loss": 0.0341,
"step": 512
},
{
"epoch": 1.3607427055702916,
"grad_norm": 36.53367358492221,
"learning_rate": 2.8067561480523315e-06,
"loss": 0.0412,
"step": 513
},
{
"epoch": 1.363395225464191,
"grad_norm": 72.92777350676762,
"learning_rate": 2.7859595667328027e-06,
"loss": 0.0346,
"step": 514
},
{
"epoch": 1.3660477453580901,
"grad_norm": 4.019754908273797,
"learning_rate": 2.7652105216862536e-06,
"loss": 0.0115,
"step": 515
},
{
"epoch": 1.3687002652519893,
"grad_norm": 26.804417375354443,
"learning_rate": 2.7445094584024067e-06,
"loss": 0.0298,
"step": 516
},
{
"epoch": 1.3713527851458887,
"grad_norm": 45.990202357286485,
"learning_rate": 2.723856821340806e-06,
"loss": 0.0377,
"step": 517
},
{
"epoch": 1.3740053050397878,
"grad_norm": 37.03560383879199,
"learning_rate": 2.703253053921266e-06,
"loss": 0.0401,
"step": 518
},
{
"epoch": 1.376657824933687,
"grad_norm": 132.83236380385904,
"learning_rate": 2.682698598514343e-06,
"loss": 0.0593,
"step": 519
},
{
"epoch": 1.3793103448275863,
"grad_norm": 81.33023653955709,
"learning_rate": 2.6621938964318593e-06,
"loss": 0.0678,
"step": 520
},
{
"epoch": 1.3819628647214854,
"grad_norm": 19.815072857885436,
"learning_rate": 2.6417393879174056e-06,
"loss": 0.0162,
"step": 521
},
{
"epoch": 1.3846153846153846,
"grad_norm": 80.27696589823212,
"learning_rate": 2.621335512136899e-06,
"loss": 0.0956,
"step": 522
},
{
"epoch": 1.387267904509284,
"grad_norm": 75.6641316305186,
"learning_rate": 2.600982707169154e-06,
"loss": 0.0504,
"step": 523
},
{
"epoch": 1.389920424403183,
"grad_norm": 6.8828160616189855,
"learning_rate": 2.580681409996477e-06,
"loss": 0.0117,
"step": 524
},
{
"epoch": 1.3925729442970822,
"grad_norm": 79.52956610275369,
"learning_rate": 2.5604320564952846e-06,
"loss": 0.0378,
"step": 525
},
{
"epoch": 1.3952254641909814,
"grad_norm": 3.3412476079565105,
"learning_rate": 2.5402350814267364e-06,
"loss": 0.0116,
"step": 526
},
{
"epoch": 1.3978779840848805,
"grad_norm": 74.0836189917631,
"learning_rate": 2.5200909184274125e-06,
"loss": 0.1159,
"step": 527
},
{
"epoch": 1.4005305039787799,
"grad_norm": 52.65976926514463,
"learning_rate": 2.5000000000000015e-06,
"loss": 0.0575,
"step": 528
},
{
"epoch": 1.403183023872679,
"grad_norm": 66.03843598072747,
"learning_rate": 2.4799627575040014e-06,
"loss": 0.0445,
"step": 529
},
{
"epoch": 1.4058355437665782,
"grad_norm": 2.2027472519213678,
"learning_rate": 2.4599796211464772e-06,
"loss": 0.0102,
"step": 530
},
{
"epoch": 1.4084880636604775,
"grad_norm": 44.9196517675172,
"learning_rate": 2.4400510199728123e-06,
"loss": 0.0195,
"step": 531
},
{
"epoch": 1.4111405835543767,
"grad_norm": 91.76177563325083,
"learning_rate": 2.4201773818574956e-06,
"loss": 0.0151,
"step": 532
},
{
"epoch": 1.4137931034482758,
"grad_norm": 23.09555777900529,
"learning_rate": 2.400359133494944e-06,
"loss": 0.0191,
"step": 533
},
{
"epoch": 1.4164456233421752,
"grad_norm": 118.53313959667297,
"learning_rate": 2.3805967003903336e-06,
"loss": 0.0666,
"step": 534
},
{
"epoch": 1.4190981432360743,
"grad_norm": 124.60220963221789,
"learning_rate": 2.360890506850464e-06,
"loss": 0.0947,
"step": 535
},
{
"epoch": 1.4217506631299734,
"grad_norm": 108.48487599136776,
"learning_rate": 2.341240975974653e-06,
"loss": 0.0334,
"step": 536
},
{
"epoch": 1.4244031830238728,
"grad_norm": 29.258330324047396,
"learning_rate": 2.3216485296456514e-06,
"loss": 0.0167,
"step": 537
},
{
"epoch": 1.427055702917772,
"grad_norm": 44.99491729254627,
"learning_rate": 2.302113588520578e-06,
"loss": 0.0196,
"step": 538
},
{
"epoch": 1.429708222811671,
"grad_norm": 194.73710142416067,
"learning_rate": 2.2826365720218984e-06,
"loss": 0.0788,
"step": 539
},
{
"epoch": 1.4323607427055702,
"grad_norm": 58.92853113805291,
"learning_rate": 2.263217898328415e-06,
"loss": 0.024,
"step": 540
},
{
"epoch": 1.4350132625994694,
"grad_norm": 131.34154138736488,
"learning_rate": 2.243857984366284e-06,
"loss": 0.0557,
"step": 541
},
{
"epoch": 1.4376657824933687,
"grad_norm": 22.727392713957276,
"learning_rate": 2.2245572458000714e-06,
"loss": 0.0102,
"step": 542
},
{
"epoch": 1.4403183023872679,
"grad_norm": 6.179370744669459,
"learning_rate": 2.205316097023826e-06,
"loss": 0.0123,
"step": 543
},
{
"epoch": 1.442970822281167,
"grad_norm": 5.1332994713955244,
"learning_rate": 2.1861349511521817e-06,
"loss": 0.0111,
"step": 544
},
{
"epoch": 1.4456233421750664,
"grad_norm": 39.86131150680043,
"learning_rate": 2.1670142200114837e-06,
"loss": 0.0329,
"step": 545
},
{
"epoch": 1.4482758620689655,
"grad_norm": 10.043074941303454,
"learning_rate": 2.147954314130955e-06,
"loss": 0.0127,
"step": 546
},
{
"epoch": 1.4509283819628647,
"grad_norm": 33.75209517992541,
"learning_rate": 2.128955642733877e-06,
"loss": 0.0108,
"step": 547
},
{
"epoch": 1.453580901856764,
"grad_norm": 4.281022797107646,
"learning_rate": 2.1100186137288005e-06,
"loss": 0.0094,
"step": 548
},
{
"epoch": 1.4562334217506632,
"grad_norm": 47.72341676502409,
"learning_rate": 2.0911436337007935e-06,
"loss": 0.0217,
"step": 549
},
{
"epoch": 1.4588859416445623,
"grad_norm": 58.00881611775122,
"learning_rate": 2.072331107902713e-06,
"loss": 0.048,
"step": 550
},
{
"epoch": 1.4615384615384617,
"grad_norm": 0.6359561449436009,
"learning_rate": 2.0535814402464922e-06,
"loss": 0.009,
"step": 551
},
{
"epoch": 1.4641909814323608,
"grad_norm": 3.323348183961715,
"learning_rate": 2.034895033294483e-06,
"loss": 0.0099,
"step": 552
},
{
"epoch": 1.46684350132626,
"grad_norm": 31.33520595629068,
"learning_rate": 2.0162722882508072e-06,
"loss": 0.0104,
"step": 553
},
{
"epoch": 1.469496021220159,
"grad_norm": 0.5675415392727176,
"learning_rate": 1.9977136049527348e-06,
"loss": 0.0088,
"step": 554
},
{
"epoch": 1.4721485411140582,
"grad_norm": 0.5205511687748441,
"learning_rate": 1.9792193818621118e-06,
"loss": 0.0085,
"step": 555
},
{
"epoch": 1.4748010610079576,
"grad_norm": 0.46492420932481743,
"learning_rate": 1.960790016056801e-06,
"loss": 0.0083,
"step": 556
},
{
"epoch": 1.4774535809018567,
"grad_norm": 0.6103064433138241,
"learning_rate": 1.9424259032221482e-06,
"loss": 0.0083,
"step": 557
},
{
"epoch": 1.4801061007957559,
"grad_norm": 101.84912553346997,
"learning_rate": 1.9241274376425e-06,
"loss": 0.0504,
"step": 558
},
{
"epoch": 1.4827586206896552,
"grad_norm": 0.5036290681501117,
"learning_rate": 1.90589501219273e-06,
"loss": 0.0082,
"step": 559
},
{
"epoch": 1.4854111405835544,
"grad_norm": 0.49630303098429807,
"learning_rate": 1.8877290183298058e-06,
"loss": 0.008,
"step": 560
},
{
"epoch": 1.4880636604774535,
"grad_norm": 1.7511111350237467,
"learning_rate": 1.869629846084382e-06,
"loss": 0.0084,
"step": 561
},
{
"epoch": 1.490716180371353,
"grad_norm": 145.93290092744815,
"learning_rate": 1.8515978840524302e-06,
"loss": 0.0746,
"step": 562
},
{
"epoch": 1.493368700265252,
"grad_norm": 2.600079267900106,
"learning_rate": 1.8336335193868955e-06,
"loss": 0.0086,
"step": 563
},
{
"epoch": 1.4960212201591512,
"grad_norm": 0.750605001982341,
"learning_rate": 1.8157371377893769e-06,
"loss": 0.008,
"step": 564
},
{
"epoch": 1.4986737400530503,
"grad_norm": 3.3186117605501178,
"learning_rate": 1.7979091235018564e-06,
"loss": 0.009,
"step": 565
},
{
"epoch": 1.5013262599469495,
"grad_norm": 0.43220741408065144,
"learning_rate": 1.7801498592984445e-06,
"loss": 0.0075,
"step": 566
},
{
"epoch": 1.5039787798408488,
"grad_norm": 0.498889987966915,
"learning_rate": 1.762459726477157e-06,
"loss": 0.0074,
"step": 567
},
{
"epoch": 1.506631299734748,
"grad_norm": 0.4530320806531492,
"learning_rate": 1.7448391048517378e-06,
"loss": 0.0074,
"step": 568
},
{
"epoch": 1.509283819628647,
"grad_norm": 0.4031770086436206,
"learning_rate": 1.7272883727434996e-06,
"loss": 0.0072,
"step": 569
},
{
"epoch": 1.5119363395225465,
"grad_norm": 0.42231287937693796,
"learning_rate": 1.709807906973196e-06,
"loss": 0.0072,
"step": 570
},
{
"epoch": 1.5145888594164456,
"grad_norm": 2.5024275258180335,
"learning_rate": 1.6923980828529424e-06,
"loss": 0.0075,
"step": 571
},
{
"epoch": 1.5172413793103448,
"grad_norm": 84.0292035953675,
"learning_rate": 1.6750592741781496e-06,
"loss": 0.0856,
"step": 572
},
{
"epoch": 1.5198938992042441,
"grad_norm": 0.5403791435068346,
"learning_rate": 1.657791853219497e-06,
"loss": 0.007,
"step": 573
},
{
"epoch": 1.5225464190981433,
"grad_norm": 0.4226395502128115,
"learning_rate": 1.640596190714947e-06,
"loss": 0.0069,
"step": 574
},
{
"epoch": 1.5251989389920424,
"grad_norm": 0.3768980080326811,
"learning_rate": 1.623472655861782e-06,
"loss": 0.0067,
"step": 575
},
{
"epoch": 1.5278514588859418,
"grad_norm": 0.3755495508872125,
"learning_rate": 1.6064216163086716e-06,
"loss": 0.0067,
"step": 576
},
{
"epoch": 1.530503978779841,
"grad_norm": 0.49910529427933337,
"learning_rate": 1.589443438147789e-06,
"loss": 0.0068,
"step": 577
},
{
"epoch": 1.53315649867374,
"grad_norm": 90.27478366870945,
"learning_rate": 1.5725384859069454e-06,
"loss": 0.0604,
"step": 578
},
{
"epoch": 1.5358090185676394,
"grad_norm": 0.3589735178642482,
"learning_rate": 1.5557071225417648e-06,
"loss": 0.0064,
"step": 579
},
{
"epoch": 1.5384615384615383,
"grad_norm": 0.3576882586646404,
"learning_rate": 1.5389497094278861e-06,
"loss": 0.0063,
"step": 580
},
{
"epoch": 1.5411140583554377,
"grad_norm": 0.35892809435755246,
"learning_rate": 1.5222666063532138e-06,
"loss": 0.0063,
"step": 581
},
{
"epoch": 1.5437665782493368,
"grad_norm": 0.3976724392685113,
"learning_rate": 1.5056581715101887e-06,
"loss": 0.0063,
"step": 582
},
{
"epoch": 1.546419098143236,
"grad_norm": 0.5400185192537355,
"learning_rate": 1.4891247614880904e-06,
"loss": 0.0062,
"step": 583
},
{
"epoch": 1.5490716180371353,
"grad_norm": 2.049521701650788,
"learning_rate": 1.472666731265394e-06,
"loss": 0.0066,
"step": 584
},
{
"epoch": 1.5517241379310345,
"grad_norm": 0.3468430762292899,
"learning_rate": 1.456284434202142e-06,
"loss": 0.0061,
"step": 585
},
{
"epoch": 1.5543766578249336,
"grad_norm": 0.3467327054472945,
"learning_rate": 1.4399782220323515e-06,
"loss": 0.006,
"step": 586
},
{
"epoch": 1.557029177718833,
"grad_norm": 0.35228902598430756,
"learning_rate": 1.4237484448564759e-06,
"loss": 0.006,
"step": 587
},
{
"epoch": 1.5596816976127321,
"grad_norm": 2.6000097356754077,
"learning_rate": 1.4075954511338784e-06,
"loss": 0.0065,
"step": 588
},
{
"epoch": 1.5623342175066313,
"grad_norm": 1.237130912335302,
"learning_rate": 1.3915195876753495e-06,
"loss": 0.006,
"step": 589
},
{
"epoch": 1.5649867374005306,
"grad_norm": 3.584447291257898,
"learning_rate": 1.3755211996356687e-06,
"loss": 0.0058,
"step": 590
},
{
"epoch": 1.5676392572944295,
"grad_norm": 9.918392784619872,
"learning_rate": 1.3596006305061888e-06,
"loss": 0.0093,
"step": 591
},
{
"epoch": 1.570291777188329,
"grad_norm": 6.146405804061137,
"learning_rate": 1.3437582221074574e-06,
"loss": 0.0063,
"step": 592
},
{
"epoch": 1.5729442970822283,
"grad_norm": 0.3182841602870679,
"learning_rate": 1.3279943145818874e-06,
"loss": 0.0057,
"step": 593
},
{
"epoch": 1.5755968169761272,
"grad_norm": 0.5271389383491623,
"learning_rate": 1.3123092463864456e-06,
"loss": 0.0058,
"step": 594
},
{
"epoch": 1.5782493368700266,
"grad_norm": 0.31139484943580775,
"learning_rate": 1.2967033542853918e-06,
"loss": 0.0055,
"step": 595
},
{
"epoch": 1.5809018567639257,
"grad_norm": 0.31737735749701884,
"learning_rate": 1.2811769733430406e-06,
"loss": 0.0056,
"step": 596
},
{
"epoch": 1.5835543766578248,
"grad_norm": 122.25701299417396,
"learning_rate": 1.2657304369165768e-06,
"loss": 0.0109,
"step": 597
},
{
"epoch": 1.5862068965517242,
"grad_norm": 0.3525084942228702,
"learning_rate": 1.250364076648894e-06,
"loss": 0.0055,
"step": 598
},
{
"epoch": 1.5888594164456233,
"grad_norm": 0.3192413472102024,
"learning_rate": 1.2350782224614689e-06,
"loss": 0.0054,
"step": 599
},
{
"epoch": 1.5915119363395225,
"grad_norm": 0.29866301866270556,
"learning_rate": 1.2198732025472876e-06,
"loss": 0.0053,
"step": 600
},
{
"epoch": 1.5941644562334218,
"grad_norm": 106.17123075764732,
"learning_rate": 1.2047493433637935e-06,
"loss": 0.0891,
"step": 601
},
{
"epoch": 1.596816976127321,
"grad_norm": 0.6551855618573065,
"learning_rate": 1.1897069696258756e-06,
"loss": 0.0055,
"step": 602
},
{
"epoch": 1.5994694960212201,
"grad_norm": 0.3010392367594056,
"learning_rate": 1.1747464042989037e-06,
"loss": 0.0053,
"step": 603
},
{
"epoch": 1.6021220159151195,
"grad_norm": 0.2963399545869238,
"learning_rate": 1.1598679685917901e-06,
"loss": 0.0053,
"step": 604
},
{
"epoch": 1.6047745358090184,
"grad_norm": 7.673021209841488,
"learning_rate": 1.1450719819500906e-06,
"loss": 0.0059,
"step": 605
},
{
"epoch": 1.6074270557029178,
"grad_norm": 0.5894185742400115,
"learning_rate": 1.1303587620491513e-06,
"loss": 0.0053,
"step": 606
},
{
"epoch": 1.6100795755968171,
"grad_norm": 0.3022377778609233,
"learning_rate": 1.1157286247872873e-06,
"loss": 0.0053,
"step": 607
},
{
"epoch": 1.612732095490716,
"grad_norm": 0.998174163284724,
"learning_rate": 1.1011818842789928e-06,
"loss": 0.0053,
"step": 608
},
{
"epoch": 1.6153846153846154,
"grad_norm": 1.2605257767864775,
"learning_rate": 1.0867188528482087e-06,
"loss": 0.0053,
"step": 609
},
{
"epoch": 1.6180371352785146,
"grad_norm": 22.434991228580405,
"learning_rate": 1.0723398410216085e-06,
"loss": 0.0113,
"step": 610
},
{
"epoch": 1.6206896551724137,
"grad_norm": 0.30072458656709766,
"learning_rate": 1.0580451575219304e-06,
"loss": 0.0052,
"step": 611
},
{
"epoch": 1.623342175066313,
"grad_norm": 6.785389276141757,
"learning_rate": 1.043835109261357e-06,
"loss": 0.0067,
"step": 612
},
{
"epoch": 1.6259946949602122,
"grad_norm": 0.43895873989236295,
"learning_rate": 1.0297100013349181e-06,
"loss": 0.0052,
"step": 613
},
{
"epoch": 1.6286472148541113,
"grad_norm": 0.3095572825845269,
"learning_rate": 1.0156701370139454e-06,
"loss": 0.005,
"step": 614
},
{
"epoch": 1.6312997347480107,
"grad_norm": 0.2807082976360762,
"learning_rate": 1.0017158177395531e-06,
"loss": 0.0049,
"step": 615
},
{
"epoch": 1.6339522546419099,
"grad_norm": 0.2774911079059377,
"learning_rate": 9.878473431161767e-07,
"loss": 0.0049,
"step": 616
},
{
"epoch": 1.636604774535809,
"grad_norm": 0.39533257376305914,
"learning_rate": 9.740650109051348e-07,
"loss": 0.0049,
"step": 617
},
{
"epoch": 1.6392572944297084,
"grad_norm": 0.28296242314049025,
"learning_rate": 9.603691170182316e-07,
"loss": 0.005,
"step": 618
},
{
"epoch": 1.6419098143236073,
"grad_norm": 3.9041204188770458,
"learning_rate": 9.467599555114137e-07,
"loss": 0.0058,
"step": 619
},
{
"epoch": 1.6445623342175066,
"grad_norm": 0.2846533102236989,
"learning_rate": 9.332378185784491e-07,
"loss": 0.005,
"step": 620
},
{
"epoch": 1.647214854111406,
"grad_norm": 51.93209557405583,
"learning_rate": 9.198029965446537e-07,
"loss": 0.0101,
"step": 621
},
{
"epoch": 1.649867374005305,
"grad_norm": 0.2696954936621249,
"learning_rate": 9.064557778606631e-07,
"loss": 0.0048,
"step": 622
},
{
"epoch": 1.6525198938992043,
"grad_norm": 0.2695956972348163,
"learning_rate": 8.931964490962364e-07,
"loss": 0.0048,
"step": 623
},
{
"epoch": 1.6551724137931034,
"grad_norm": 94.98929934493466,
"learning_rate": 8.800252949340998e-07,
"loss": 0.0683,
"step": 624
},
{
"epoch": 1.6578249336870026,
"grad_norm": 1.2974916163318027,
"learning_rate": 8.669425981638413e-07,
"loss": 0.005,
"step": 625
},
{
"epoch": 1.660477453580902,
"grad_norm": 0.2710452417200414,
"learning_rate": 8.539486396758357e-07,
"loss": 0.0048,
"step": 626
},
{
"epoch": 1.663129973474801,
"grad_norm": 0.2666246456468241,
"learning_rate": 8.410436984552112e-07,
"loss": 0.0048,
"step": 627
},
{
"epoch": 1.6657824933687002,
"grad_norm": 1.6036131193583794,
"learning_rate": 8.282280515758639e-07,
"loss": 0.0049,
"step": 628
},
{
"epoch": 1.6684350132625996,
"grad_norm": 0.26275985762388926,
"learning_rate": 8.15501974194508e-07,
"loss": 0.0047,
"step": 629
},
{
"epoch": 1.6710875331564987,
"grad_norm": 0.2760173400456831,
"learning_rate": 8.02865739544767e-07,
"loss": 0.0047,
"step": 630
},
{
"epoch": 1.6737400530503979,
"grad_norm": 0.258375590884943,
"learning_rate": 7.903196189313039e-07,
"loss": 0.0046,
"step": 631
},
{
"epoch": 1.6763925729442972,
"grad_norm": 0.2622808578616796,
"learning_rate": 7.778638817240042e-07,
"loss": 0.0047,
"step": 632
},
{
"epoch": 1.6790450928381961,
"grad_norm": 0.25880678528941237,
"learning_rate": 7.654987953521875e-07,
"loss": 0.0046,
"step": 633
},
{
"epoch": 1.6816976127320955,
"grad_norm": 0.25847177799776694,
"learning_rate": 7.532246252988617e-07,
"loss": 0.0046,
"step": 634
},
{
"epoch": 1.6843501326259946,
"grad_norm": 9.543103543412947,
"learning_rate": 7.410416350950333e-07,
"loss": 0.006,
"step": 635
},
{
"epoch": 1.6870026525198938,
"grad_norm": 0.2583838978284882,
"learning_rate": 7.289500863140414e-07,
"loss": 0.0045,
"step": 636
},
{
"epoch": 1.6896551724137931,
"grad_norm": 0.25730496900642014,
"learning_rate": 7.16950238565941e-07,
"loss": 0.0046,
"step": 637
},
{
"epoch": 1.6923076923076923,
"grad_norm": 100.42652935388513,
"learning_rate": 7.05042349491935e-07,
"loss": 0.0583,
"step": 638
},
{
"epoch": 1.6949602122015914,
"grad_norm": 0.2532567668137234,
"learning_rate": 6.932266747588395e-07,
"loss": 0.0045,
"step": 639
},
{
"epoch": 1.6976127320954908,
"grad_norm": 0.3114113921947813,
"learning_rate": 6.815034680535915e-07,
"loss": 0.0045,
"step": 640
},
{
"epoch": 1.70026525198939,
"grad_norm": 0.3692338355058846,
"learning_rate": 6.698729810778065e-07,
"loss": 0.0046,
"step": 641
},
{
"epoch": 1.702917771883289,
"grad_norm": 6.091089006073549,
"learning_rate": 6.583354635423755e-07,
"loss": 0.0052,
"step": 642
},
{
"epoch": 1.7055702917771884,
"grad_norm": 26.336302510662076,
"learning_rate": 6.46891163162095e-07,
"loss": 0.0081,
"step": 643
},
{
"epoch": 1.7082228116710876,
"grad_norm": 0.29512973513581486,
"learning_rate": 6.355403256503595e-07,
"loss": 0.0045,
"step": 644
},
{
"epoch": 1.7108753315649867,
"grad_norm": 0.7223730285452392,
"learning_rate": 6.242831947138806e-07,
"loss": 0.0046,
"step": 645
},
{
"epoch": 1.713527851458886,
"grad_norm": 126.44512664163686,
"learning_rate": 6.131200120474512e-07,
"loss": 0.0318,
"step": 646
},
{
"epoch": 1.716180371352785,
"grad_norm": 0.40609491390564195,
"learning_rate": 6.020510173287636e-07,
"loss": 0.0046,
"step": 647
},
{
"epoch": 1.7188328912466844,
"grad_norm": 0.2507013793463241,
"learning_rate": 5.910764482132575e-07,
"loss": 0.0044,
"step": 648
},
{
"epoch": 1.7214854111405835,
"grad_norm": 0.24773838391502986,
"learning_rate": 5.801965403290221e-07,
"loss": 0.0044,
"step": 649
},
{
"epoch": 1.7241379310344827,
"grad_norm": 0.2486645179595358,
"learning_rate": 5.694115272717326e-07,
"loss": 0.0044,
"step": 650
},
{
"epoch": 1.726790450928382,
"grad_norm": 0.24730568044224677,
"learning_rate": 5.587216405996343e-07,
"loss": 0.0044,
"step": 651
},
{
"epoch": 1.7294429708222812,
"grad_norm": 0.2538856932859245,
"learning_rate": 5.481271098285818e-07,
"loss": 0.0043,
"step": 652
},
{
"epoch": 1.7320954907161803,
"grad_norm": 0.26593342998498926,
"learning_rate": 5.376281624270946e-07,
"loss": 0.0044,
"step": 653
},
{
"epoch": 1.7347480106100797,
"grad_norm": 0.248248124621863,
"learning_rate": 5.272250238114857e-07,
"loss": 0.0044,
"step": 654
},
{
"epoch": 1.7374005305039788,
"grad_norm": 0.2458121042836853,
"learning_rate": 5.169179173410178e-07,
"loss": 0.0044,
"step": 655
},
{
"epoch": 1.740053050397878,
"grad_norm": 0.24729370932843403,
"learning_rate": 5.067070643131056e-07,
"loss": 0.0044,
"step": 656
},
{
"epoch": 1.7427055702917773,
"grad_norm": 15.385259424248543,
"learning_rate": 4.965926839585688e-07,
"loss": 0.007,
"step": 657
},
{
"epoch": 1.7453580901856764,
"grad_norm": 0.2822390112635331,
"learning_rate": 4.865749934369224e-07,
"loss": 0.0043,
"step": 658
},
{
"epoch": 1.7480106100795756,
"grad_norm": 0.24839423194997115,
"learning_rate": 4.766542078317121e-07,
"loss": 0.0043,
"step": 659
},
{
"epoch": 1.750663129973475,
"grad_norm": 0.24111468553355503,
"learning_rate": 4.668305401459022e-07,
"loss": 0.0043,
"step": 660
},
{
"epoch": 1.7533156498673739,
"grad_norm": 0.2438447012071553,
"learning_rate": 4.571042012972993e-07,
"loss": 0.0043,
"step": 661
},
{
"epoch": 1.7559681697612732,
"grad_norm": 0.24361572051168376,
"learning_rate": 4.4747540011401913e-07,
"loss": 0.0043,
"step": 662
},
{
"epoch": 1.7586206896551724,
"grad_norm": 0.5275300344193206,
"learning_rate": 4.379443433300129e-07,
"loss": 0.0043,
"step": 663
},
{
"epoch": 1.7612732095490715,
"grad_norm": 0.23717312427507825,
"learning_rate": 4.2851123558061927e-07,
"loss": 0.0042,
"step": 664
},
{
"epoch": 1.7639257294429709,
"grad_norm": 0.2571916721136695,
"learning_rate": 4.1917627939817793e-07,
"loss": 0.0043,
"step": 665
},
{
"epoch": 1.76657824933687,
"grad_norm": 0.5553820884441111,
"learning_rate": 4.0993967520767455e-07,
"loss": 0.0044,
"step": 666
},
{
"epoch": 1.7692307692307692,
"grad_norm": 2.246712558405596,
"learning_rate": 4.008016213224408e-07,
"loss": 0.0045,
"step": 667
},
{
"epoch": 1.7718832891246685,
"grad_norm": 0.2358532445026429,
"learning_rate": 3.9176231393990183e-07,
"loss": 0.0042,
"step": 668
},
{
"epoch": 1.7745358090185677,
"grad_norm": 0.23255907341279805,
"learning_rate": 3.8282194713735286e-07,
"loss": 0.0041,
"step": 669
},
{
"epoch": 1.7771883289124668,
"grad_norm": 0.25068833373474997,
"learning_rate": 3.739807128677986e-07,
"loss": 0.0042,
"step": 670
},
{
"epoch": 1.7798408488063662,
"grad_norm": 0.28998360046758886,
"learning_rate": 3.6523880095583554e-07,
"loss": 0.0042,
"step": 671
},
{
"epoch": 1.782493368700265,
"grad_norm": 70.9543828204898,
"learning_rate": 3.5659639909356725e-07,
"loss": 0.0316,
"step": 672
},
{
"epoch": 1.7851458885941645,
"grad_norm": 11.517585657154022,
"learning_rate": 3.480536928365824e-07,
"loss": 0.0065,
"step": 673
},
{
"epoch": 1.7877984084880638,
"grad_norm": 0.24209238846563902,
"learning_rate": 3.39610865599968e-07,
"loss": 0.0042,
"step": 674
},
{
"epoch": 1.7904509283819627,
"grad_norm": 0.23781117806303836,
"learning_rate": 3.3126809865436817e-07,
"loss": 0.0041,
"step": 675
},
{
"epoch": 1.793103448275862,
"grad_norm": 0.2366381303709924,
"learning_rate": 3.230255711220992e-07,
"loss": 0.0042,
"step": 676
},
{
"epoch": 1.7957559681697612,
"grad_norm": 0.5459967361685063,
"learning_rate": 3.1488345997329806e-07,
"loss": 0.0042,
"step": 677
},
{
"epoch": 1.7984084880636604,
"grad_norm": 0.4047990607616113,
"learning_rate": 3.0684194002212287e-07,
"loss": 0.0041,
"step": 678
},
{
"epoch": 1.8010610079575597,
"grad_norm": 0.23141431270577473,
"learning_rate": 2.9890118392300493e-07,
"loss": 0.0041,
"step": 679
},
{
"epoch": 1.8037135278514589,
"grad_norm": 0.24515511136563692,
"learning_rate": 2.910613621669356e-07,
"loss": 0.0041,
"step": 680
},
{
"epoch": 1.806366047745358,
"grad_norm": 1.7050497926298187,
"learning_rate": 2.83322643077808e-07,
"loss": 0.0045,
"step": 681
},
{
"epoch": 1.8090185676392574,
"grad_norm": 0.23635479397726022,
"learning_rate": 2.756851928088056e-07,
"loss": 0.0042,
"step": 682
},
{
"epoch": 1.8116710875331565,
"grad_norm": 0.2342130054640727,
"learning_rate": 2.681491753388282e-07,
"loss": 0.0041,
"step": 683
},
{
"epoch": 1.8143236074270557,
"grad_norm": 0.23203537966106563,
"learning_rate": 2.607147524689829e-07,
"loss": 0.0041,
"step": 684
},
{
"epoch": 1.816976127320955,
"grad_norm": 0.2307593686090978,
"learning_rate": 2.533820838190959e-07,
"loss": 0.0041,
"step": 685
},
{
"epoch": 1.819628647214854,
"grad_norm": 0.2851962284663173,
"learning_rate": 2.461513268242938e-07,
"loss": 0.0042,
"step": 686
},
{
"epoch": 1.8222811671087533,
"grad_norm": 0.33440588668973653,
"learning_rate": 2.390226367316262e-07,
"loss": 0.0041,
"step": 687
},
{
"epoch": 1.8249336870026527,
"grad_norm": 0.23083456015449386,
"learning_rate": 2.3199616659672352e-07,
"loss": 0.0041,
"step": 688
},
{
"epoch": 1.8275862068965516,
"grad_norm": 0.8520328211146436,
"learning_rate": 2.2507206728051732e-07,
"loss": 0.0042,
"step": 689
},
{
"epoch": 1.830238726790451,
"grad_norm": 0.22849031893442262,
"learning_rate": 2.1825048744600062e-07,
"loss": 0.0041,
"step": 690
},
{
"epoch": 1.83289124668435,
"grad_norm": 143.7065462713553,
"learning_rate": 2.1153157355503274e-07,
"loss": 0.0296,
"step": 691
},
{
"epoch": 1.8355437665782492,
"grad_norm": 0.22881014891837861,
"learning_rate": 2.0491546986519896e-07,
"loss": 0.004,
"step": 692
},
{
"epoch": 1.8381962864721486,
"grad_norm": 0.2867682040530811,
"learning_rate": 1.9840231842671087e-07,
"loss": 0.0041,
"step": 693
},
{
"epoch": 1.8408488063660478,
"grad_norm": 0.23516372004144973,
"learning_rate": 1.9199225907935492e-07,
"loss": 0.004,
"step": 694
},
{
"epoch": 1.843501326259947,
"grad_norm": 0.23575605461699675,
"learning_rate": 1.8568542944949474e-07,
"loss": 0.0041,
"step": 695
},
{
"epoch": 1.8461538461538463,
"grad_norm": 0.23120379453374001,
"learning_rate": 1.794819649471119e-07,
"loss": 0.0041,
"step": 696
},
{
"epoch": 1.8488063660477454,
"grad_norm": 0.23494372122100343,
"learning_rate": 1.7338199876289984e-07,
"loss": 0.0041,
"step": 697
},
{
"epoch": 1.8514588859416445,
"grad_norm": 0.2267567245082868,
"learning_rate": 1.6738566186540628e-07,
"loss": 0.004,
"step": 698
},
{
"epoch": 1.854111405835544,
"grad_norm": 0.22946931691211364,
"learning_rate": 1.6149308299821643e-07,
"loss": 0.0041,
"step": 699
},
{
"epoch": 1.8567639257294428,
"grad_norm": 0.22698231845451255,
"learning_rate": 1.5570438867719695e-07,
"loss": 0.004,
"step": 700
},
{
"epoch": 1.8594164456233422,
"grad_norm": 0.22519946509907784,
"learning_rate": 1.500197031877698e-07,
"loss": 0.004,
"step": 701
},
{
"epoch": 1.8620689655172413,
"grad_norm": 0.2357152301031921,
"learning_rate": 1.4443914858224938e-07,
"loss": 0.0041,
"step": 702
},
{
"epoch": 1.8647214854111405,
"grad_norm": 0.23146666552008044,
"learning_rate": 1.3896284467722398e-07,
"loss": 0.0041,
"step": 703
},
{
"epoch": 1.8673740053050398,
"grad_norm": 0.22871909002642044,
"learning_rate": 1.335909090509785e-07,
"loss": 0.0041,
"step": 704
},
{
"epoch": 1.870026525198939,
"grad_norm": 0.2330036838970883,
"learning_rate": 1.2832345704097082e-07,
"loss": 0.0041,
"step": 705
},
{
"epoch": 1.8726790450928381,
"grad_norm": 0.22546368567502312,
"learning_rate": 1.2316060174136e-07,
"loss": 0.004,
"step": 706
},
{
"epoch": 1.8753315649867375,
"grad_norm": 0.2391827765733045,
"learning_rate": 1.1810245400057152e-07,
"loss": 0.0041,
"step": 707
},
{
"epoch": 1.8779840848806366,
"grad_norm": 0.23135321137439943,
"learning_rate": 1.1314912241892184e-07,
"loss": 0.004,
"step": 708
},
{
"epoch": 1.8806366047745358,
"grad_norm": 0.22616985941276838,
"learning_rate": 1.0830071334628655e-07,
"loss": 0.004,
"step": 709
},
{
"epoch": 1.8832891246684351,
"grad_norm": 0.22812730794748792,
"learning_rate": 1.035573308798138e-07,
"loss": 0.004,
"step": 710
},
{
"epoch": 1.8859416445623343,
"grad_norm": 0.2243059239586133,
"learning_rate": 9.891907686169211e-08,
"loss": 0.004,
"step": 711
},
{
"epoch": 1.8885941644562334,
"grad_norm": 0.4588092051271435,
"learning_rate": 9.43860508769645e-08,
"loss": 0.0041,
"step": 712
},
{
"epoch": 1.8912466843501328,
"grad_norm": 0.22536403743459937,
"learning_rate": 8.995835025138677e-08,
"loss": 0.004,
"step": 713
},
{
"epoch": 1.8938992042440317,
"grad_norm": 0.22565203184615454,
"learning_rate": 8.563607004934193e-08,
"loss": 0.004,
"step": 714
},
{
"epoch": 1.896551724137931,
"grad_norm": 0.22366314667273499,
"learning_rate": 8.141930307179468e-08,
"loss": 0.004,
"step": 715
},
{
"epoch": 1.8992042440318302,
"grad_norm": 0.2279783994579146,
"learning_rate": 7.730813985430407e-08,
"loss": 0.004,
"step": 716
},
{
"epoch": 1.9018567639257293,
"grad_norm": 0.24135436794835546,
"learning_rate": 7.330266866507618e-08,
"loss": 0.004,
"step": 717
},
{
"epoch": 1.9045092838196287,
"grad_norm": 0.23433143970955525,
"learning_rate": 6.940297550306895e-08,
"loss": 0.004,
"step": 718
},
{
"epoch": 1.9071618037135278,
"grad_norm": 0.22634949627182713,
"learning_rate": 6.560914409614872e-08,
"loss": 0.004,
"step": 719
},
{
"epoch": 1.909814323607427,
"grad_norm": 0.2387086723321628,
"learning_rate": 6.192125589928821e-08,
"loss": 0.004,
"step": 720
},
{
"epoch": 1.9124668435013263,
"grad_norm": 6.202647111806296,
"learning_rate": 5.833939009282086e-08,
"loss": 0.0053,
"step": 721
},
{
"epoch": 1.9151193633952255,
"grad_norm": 0.22863334490515452,
"learning_rate": 5.486362358074093e-08,
"loss": 0.0041,
"step": 722
},
{
"epoch": 1.9177718832891246,
"grad_norm": 0.22698135338110348,
"learning_rate": 5.1494030989049926e-08,
"loss": 0.004,
"step": 723
},
{
"epoch": 1.920424403183024,
"grad_norm": 0.22590216714405328,
"learning_rate": 4.823068466415615e-08,
"loss": 0.004,
"step": 724
},
{
"epoch": 1.9230769230769231,
"grad_norm": 0.22807141517652188,
"learning_rate": 4.5073654671320965e-08,
"loss": 0.0041,
"step": 725
},
{
"epoch": 1.9257294429708223,
"grad_norm": 1.201301894967121,
"learning_rate": 4.202300879315446e-08,
"loss": 0.0043,
"step": 726
},
{
"epoch": 1.9283819628647216,
"grad_norm": 0.23056138949041405,
"learning_rate": 3.907881252816048e-08,
"loss": 0.0041,
"step": 727
},
{
"epoch": 1.9310344827586206,
"grad_norm": 2.4312545444566362,
"learning_rate": 3.6241129089329416e-08,
"loss": 0.0043,
"step": 728
},
{
"epoch": 1.93368700265252,
"grad_norm": 0.22957360649656,
"learning_rate": 3.351001940278209e-08,
"loss": 0.004,
"step": 729
},
{
"epoch": 1.936339522546419,
"grad_norm": 0.22733977672942887,
"learning_rate": 3.088554210646133e-08,
"loss": 0.004,
"step": 730
},
{
"epoch": 1.9389920424403182,
"grad_norm": 0.22568780368466787,
"learning_rate": 2.8367753548871335e-08,
"loss": 0.004,
"step": 731
},
{
"epoch": 1.9416445623342176,
"grad_norm": 0.22705148098683284,
"learning_rate": 2.595670778787196e-08,
"loss": 0.004,
"step": 732
},
{
"epoch": 1.9442970822281167,
"grad_norm": 0.460666108715405,
"learning_rate": 2.3652456589512983e-08,
"loss": 0.0041,
"step": 733
},
{
"epoch": 1.9469496021220158,
"grad_norm": 0.22609962320812726,
"learning_rate": 2.1455049426926666e-08,
"loss": 0.004,
"step": 734
},
{
"epoch": 1.9496021220159152,
"grad_norm": 0.22507833764853438,
"learning_rate": 1.9364533479263036e-08,
"loss": 0.004,
"step": 735
},
{
"epoch": 1.9522546419098143,
"grad_norm": 0.23888217596144562,
"learning_rate": 1.7380953630678488e-08,
"loss": 0.004,
"step": 736
},
{
"epoch": 1.9549071618037135,
"grad_norm": 0.2330182908978619,
"learning_rate": 1.5504352469371543e-08,
"loss": 0.004,
"step": 737
},
{
"epoch": 1.9575596816976129,
"grad_norm": 0.22417303270400205,
"learning_rate": 1.373477028666803e-08,
"loss": 0.004,
"step": 738
},
{
"epoch": 1.9602122015915118,
"grad_norm": 0.22666476984553308,
"learning_rate": 1.2072245076156786e-08,
"loss": 0.004,
"step": 739
},
{
"epoch": 1.9628647214854111,
"grad_norm": 4.155083837928933,
"learning_rate": 1.0516812532873622e-08,
"loss": 0.005,
"step": 740
},
{
"epoch": 1.9655172413793105,
"grad_norm": 0.2266434772362444,
"learning_rate": 9.068506052534732e-09,
"loss": 0.004,
"step": 741
},
{
"epoch": 1.9681697612732094,
"grad_norm": 0.22759641996655722,
"learning_rate": 7.727356730820035e-09,
"loss": 0.0041,
"step": 742
},
{
"epoch": 1.9708222811671088,
"grad_norm": 0.22640459239936114,
"learning_rate": 6.49339336270427e-09,
"loss": 0.004,
"step": 743
},
{
"epoch": 1.973474801061008,
"grad_norm": 0.2269001817332957,
"learning_rate": 5.366642441841374e-09,
"loss": 0.004,
"step": 744
},
{
"epoch": 1.976127320954907,
"grad_norm": 0.22126857041830134,
"learning_rate": 4.347128159993829e-09,
"loss": 0.0039,
"step": 745
},
{
"epoch": 1.9787798408488064,
"grad_norm": 0.22613994671550425,
"learning_rate": 3.4348724065119687e-09,
"loss": 0.004,
"step": 746
},
{
"epoch": 1.9814323607427056,
"grad_norm": 0.22626971062577017,
"learning_rate": 2.62989476786768e-09,
"loss": 0.004,
"step": 747
},
{
"epoch": 1.9840848806366047,
"grad_norm": 0.22548279505487495,
"learning_rate": 1.9322125272297488e-09,
"loss": 0.004,
"step": 748
},
{
"epoch": 1.986737400530504,
"grad_norm": 0.2274724438060562,
"learning_rate": 1.3418406640969272e-09,
"loss": 0.004,
"step": 749
},
{
"epoch": 1.9893899204244032,
"grad_norm": 0.2259285197005954,
"learning_rate": 8.587918539726403e-10,
"loss": 0.004,
"step": 750
},
{
"epoch": 1.9920424403183024,
"grad_norm": 1.7457170404512745,
"learning_rate": 4.830764680946453e-10,
"loss": 0.0044,
"step": 751
},
{
"epoch": 1.9946949602122017,
"grad_norm": 0.22469337804785391,
"learning_rate": 2.1470257321298815e-10,
"loss": 0.004,
"step": 752
},
{
"epoch": 1.9973474801061006,
"grad_norm": 0.2279365937027892,
"learning_rate": 5.3675931413477156e-11,
"loss": 0.004,
"step": 753
},
{
"epoch": 2.0,
"grad_norm": 0.48703773792450233,
"learning_rate": 0.0,
"loss": 0.0042,
"step": 754
},
{
"epoch": 2.0,
"eval_loss": 0.590460479259491,
"eval_runtime": 175.8842,
"eval_samples_per_second": 12.025,
"eval_steps_per_second": 1.507,
"step": 754
},
{
"epoch": 2.0,
"step": 754,
"total_flos": 22077400891392.0,
"train_loss": 0.6806143896792588,
"train_runtime": 3320.7586,
"train_samples_per_second": 3.632,
"train_steps_per_second": 0.227
}
],
"logging_steps": 1,
"max_steps": 754,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 22077400891392.0,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}