diff --git "a/checkpoint-1000/trainer_state.json" "b/checkpoint-1000/trainer_state.json" new file mode 100644--- /dev/null +++ "b/checkpoint-1000/trainer_state.json" @@ -0,0 +1,7033 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 0.01, + "eval_steps": 500, + "global_step": 1000, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 1e-05, + "grad_norm": 1.631775788945859, + "learning_rate": 3e-06, + "loss": 10.8658, + "step": 1 + }, + { + "epoch": 2e-05, + "grad_norm": 1.6173222705173065, + "learning_rate": 6e-06, + "loss": 10.8645, + "step": 2 + }, + { + "epoch": 3e-05, + "grad_norm": 1.6387507243802044, + "learning_rate": 9e-06, + "loss": 10.8646, + "step": 3 + }, + { + "epoch": 4e-05, + "grad_norm": 1.5975767011448438, + "learning_rate": 1.2e-05, + "loss": 10.8638, + "step": 4 + }, + { + "epoch": 5e-05, + "grad_norm": 1.6454022013221787, + "learning_rate": 1.5e-05, + "loss": 10.8605, + "step": 5 + }, + { + "epoch": 6e-05, + "grad_norm": 1.6407987097684302, + "learning_rate": 1.8e-05, + "loss": 10.8581, + "step": 6 + }, + { + "epoch": 7e-05, + "grad_norm": 1.609586764602888, + "learning_rate": 2.1000000000000002e-05, + "loss": 10.8444, + "step": 7 + }, + { + "epoch": 8e-05, + "grad_norm": 1.4683048428970586, + "learning_rate": 2.4e-05, + "loss": 10.8173, + "step": 8 + }, + { + "epoch": 9e-05, + "grad_norm": 1.3933728304700357, + "learning_rate": 2.7e-05, + "loss": 10.8102, + "step": 9 + }, + { + "epoch": 0.0001, + "grad_norm": 1.3326098319804733, + "learning_rate": 3e-05, + "loss": 10.7958, + "step": 10 + }, + { + "epoch": 0.00011, + "grad_norm": 1.217237966643813, + "learning_rate": 3.2999999999999996e-05, + "loss": 10.779, + "step": 11 + }, + { + "epoch": 0.00012, + "grad_norm": 1.1764691634458027, + "learning_rate": 3.6e-05, + "loss": 10.7677, + "step": 12 + }, + { + "epoch": 0.00013, + "grad_norm": 1.1304717895604097, + "learning_rate": 3.9e-05, + "loss": 10.7486, + "step": 13 + }, + { + "epoch": 0.00014, + "grad_norm": 1.115888694899127, + "learning_rate": 4.2000000000000004e-05, + "loss": 10.7378, + "step": 14 + }, + { + "epoch": 0.00015, + "grad_norm": 1.1065220670447153, + "learning_rate": 4.4999999999999996e-05, + "loss": 10.7288, + "step": 15 + }, + { + "epoch": 0.00016, + "grad_norm": 1.075226769828573, + "learning_rate": 4.8e-05, + "loss": 10.7115, + "step": 16 + }, + { + "epoch": 0.00017, + "grad_norm": 1.0461893681391048, + "learning_rate": 5.1000000000000006e-05, + "loss": 10.6956, + "step": 17 + }, + { + "epoch": 0.00018, + "grad_norm": 1.0230892582928048, + "learning_rate": 5.4e-05, + "loss": 10.6795, + "step": 18 + }, + { + "epoch": 0.00019, + "grad_norm": 0.9869910790408369, + "learning_rate": 5.7e-05, + "loss": 10.6637, + "step": 19 + }, + { + "epoch": 0.0002, + "grad_norm": 0.9729421058544264, + "learning_rate": 6e-05, + "loss": 10.6515, + "step": 20 + }, + { + "epoch": 0.00021, + "grad_norm": 0.943988636941647, + "learning_rate": 6.3e-05, + "loss": 10.6378, + "step": 21 + }, + { + "epoch": 0.00022, + "grad_norm": 0.9278112602221015, + "learning_rate": 6.599999999999999e-05, + "loss": 10.6233, + "step": 22 + }, + { + "epoch": 0.00023, + "grad_norm": 0.920877936489298, + "learning_rate": 6.9e-05, + "loss": 10.6091, + "step": 23 + }, + { + "epoch": 0.00024, + "grad_norm": 0.9178015901962371, + "learning_rate": 7.2e-05, + "loss": 10.5981, + "step": 24 + }, + { + "epoch": 0.00025, + "grad_norm": 0.9193853191136445, + "learning_rate": 7.500000000000001e-05, + "loss": 
10.5835, + "step": 25 + }, + { + "epoch": 0.00026, + "grad_norm": 0.9191743831022944, + "learning_rate": 7.8e-05, + "loss": 10.5705, + "step": 26 + }, + { + "epoch": 0.00027, + "grad_norm": 0.9136913401152261, + "learning_rate": 8.1e-05, + "loss": 10.5585, + "step": 27 + }, + { + "epoch": 0.00028, + "grad_norm": 0.9119853724531574, + "learning_rate": 8.400000000000001e-05, + "loss": 10.5456, + "step": 28 + }, + { + "epoch": 0.00029, + "grad_norm": 0.9130829908837624, + "learning_rate": 8.7e-05, + "loss": 10.5312, + "step": 29 + }, + { + "epoch": 0.0003, + "grad_norm": 0.9186726006674357, + "learning_rate": 8.999999999999999e-05, + "loss": 10.5159, + "step": 30 + }, + { + "epoch": 0.00031, + "grad_norm": 0.9155120967133267, + "learning_rate": 9.3e-05, + "loss": 10.5012, + "step": 31 + }, + { + "epoch": 0.00032, + "grad_norm": 0.9096055134642034, + "learning_rate": 9.6e-05, + "loss": 10.4871, + "step": 32 + }, + { + "epoch": 0.00033, + "grad_norm": 0.91013596598753, + "learning_rate": 9.900000000000001e-05, + "loss": 10.4706, + "step": 33 + }, + { + "epoch": 0.00034, + "grad_norm": 0.9103576711685224, + "learning_rate": 0.00010200000000000001, + "loss": 10.4543, + "step": 34 + }, + { + "epoch": 0.00035, + "grad_norm": 0.9164612613814794, + "learning_rate": 0.00010500000000000002, + "loss": 10.4377, + "step": 35 + }, + { + "epoch": 0.00036, + "grad_norm": 0.9129652960092816, + "learning_rate": 0.000108, + "loss": 10.4202, + "step": 36 + }, + { + "epoch": 0.00037, + "grad_norm": 0.9029365420083331, + "learning_rate": 0.000111, + "loss": 10.4036, + "step": 37 + }, + { + "epoch": 0.00038, + "grad_norm": 0.9075151789728013, + "learning_rate": 0.000114, + "loss": 10.3847, + "step": 38 + }, + { + "epoch": 0.00039, + "grad_norm": 0.9102185085970206, + "learning_rate": 0.000117, + "loss": 10.3654, + "step": 39 + }, + { + "epoch": 0.0004, + "grad_norm": 0.9144897951886523, + "learning_rate": 0.00012, + "loss": 10.3432, + "step": 40 + }, + { + "epoch": 0.00041, + "grad_norm": 0.9044769084629607, + "learning_rate": 0.000123, + "loss": 10.3253, + "step": 41 + }, + { + "epoch": 0.00042, + "grad_norm": 0.9101249031053068, + "learning_rate": 0.000126, + "loss": 10.3047, + "step": 42 + }, + { + "epoch": 0.00043, + "grad_norm": 0.9147428510616606, + "learning_rate": 0.000129, + "loss": 10.282, + "step": 43 + }, + { + "epoch": 0.00044, + "grad_norm": 0.9138907921510238, + "learning_rate": 0.00013199999999999998, + "loss": 10.2606, + "step": 44 + }, + { + "epoch": 0.00045, + "grad_norm": 0.9165726299867081, + "learning_rate": 0.000135, + "loss": 10.2377, + "step": 45 + }, + { + "epoch": 0.00046, + "grad_norm": 0.906013196308877, + "learning_rate": 0.000138, + "loss": 10.216, + "step": 46 + }, + { + "epoch": 0.00047, + "grad_norm": 0.9116233570839986, + "learning_rate": 0.000141, + "loss": 10.1904, + "step": 47 + }, + { + "epoch": 0.00048, + "grad_norm": 0.9086131145608887, + "learning_rate": 0.000144, + "loss": 10.1674, + "step": 48 + }, + { + "epoch": 0.00049, + "grad_norm": 0.9094213201037699, + "learning_rate": 0.000147, + "loss": 10.1435, + "step": 49 + }, + { + "epoch": 0.0005, + "grad_norm": 0.9128293203892458, + "learning_rate": 0.00015000000000000001, + "loss": 10.1173, + "step": 50 + }, + { + "epoch": 0.00051, + "grad_norm": 0.9140261923858894, + "learning_rate": 0.000153, + "loss": 10.0916, + "step": 51 + }, + { + "epoch": 0.00052, + "grad_norm": 0.9121253786029146, + "learning_rate": 0.000156, + "loss": 10.0657, + "step": 52 + }, + { + "epoch": 0.00053, + "grad_norm": 0.9034950978450355, + 
"learning_rate": 0.000159, + "loss": 10.0413, + "step": 53 + }, + { + "epoch": 0.00054, + "grad_norm": 0.9241206201798855, + "learning_rate": 0.000162, + "loss": 10.0092, + "step": 54 + }, + { + "epoch": 0.00055, + "grad_norm": 0.9149154841379399, + "learning_rate": 0.000165, + "loss": 9.9852, + "step": 55 + }, + { + "epoch": 0.00056, + "grad_norm": 0.9070429364957124, + "learning_rate": 0.00016800000000000002, + "loss": 9.9561, + "step": 56 + }, + { + "epoch": 0.00057, + "grad_norm": 0.9122859536360214, + "learning_rate": 0.000171, + "loss": 9.9285, + "step": 57 + }, + { + "epoch": 0.00058, + "grad_norm": 0.9101615769667563, + "learning_rate": 0.000174, + "loss": 9.9038, + "step": 58 + }, + { + "epoch": 0.00059, + "grad_norm": 0.91462118557662, + "learning_rate": 0.000177, + "loss": 9.872, + "step": 59 + }, + { + "epoch": 0.0006, + "grad_norm": 0.902238261354272, + "learning_rate": 0.00017999999999999998, + "loss": 9.846, + "step": 60 + }, + { + "epoch": 0.00061, + "grad_norm": 0.9010263217287604, + "learning_rate": 0.000183, + "loss": 9.8177, + "step": 61 + }, + { + "epoch": 0.00062, + "grad_norm": 0.9073977759483284, + "learning_rate": 0.000186, + "loss": 9.7857, + "step": 62 + }, + { + "epoch": 0.00063, + "grad_norm": 0.9002035349554564, + "learning_rate": 0.000189, + "loss": 9.7585, + "step": 63 + }, + { + "epoch": 0.00064, + "grad_norm": 0.8964472690991813, + "learning_rate": 0.000192, + "loss": 9.7283, + "step": 64 + }, + { + "epoch": 0.00065, + "grad_norm": 0.9038015098943822, + "learning_rate": 0.00019500000000000002, + "loss": 9.696, + "step": 65 + }, + { + "epoch": 0.00066, + "grad_norm": 0.8969444374465311, + "learning_rate": 0.00019800000000000002, + "loss": 9.6719, + "step": 66 + }, + { + "epoch": 0.00067, + "grad_norm": 0.898249772566312, + "learning_rate": 0.000201, + "loss": 9.642, + "step": 67 + }, + { + "epoch": 0.00068, + "grad_norm": 0.9048900549589218, + "learning_rate": 0.00020400000000000003, + "loss": 9.6091, + "step": 68 + }, + { + "epoch": 0.00069, + "grad_norm": 0.8901553719298861, + "learning_rate": 0.00020700000000000002, + "loss": 9.5816, + "step": 69 + }, + { + "epoch": 0.0007, + "grad_norm": 0.903386863411625, + "learning_rate": 0.00021000000000000004, + "loss": 9.5447, + "step": 70 + }, + { + "epoch": 0.00071, + "grad_norm": 0.8933303377898588, + "learning_rate": 0.00021299999999999997, + "loss": 9.518, + "step": 71 + }, + { + "epoch": 0.00072, + "grad_norm": 0.8969213907091691, + "learning_rate": 0.000216, + "loss": 9.4906, + "step": 72 + }, + { + "epoch": 0.00073, + "grad_norm": 0.8960518018191052, + "learning_rate": 0.00021899999999999998, + "loss": 9.4566, + "step": 73 + }, + { + "epoch": 0.00074, + "grad_norm": 0.8964935065687102, + "learning_rate": 0.000222, + "loss": 9.4297, + "step": 74 + }, + { + "epoch": 0.00075, + "grad_norm": 0.8917043311520961, + "learning_rate": 0.000225, + "loss": 9.3947, + "step": 75 + }, + { + "epoch": 0.00076, + "grad_norm": 0.8997723193205578, + "learning_rate": 0.000228, + "loss": 9.3676, + "step": 76 + }, + { + "epoch": 0.00077, + "grad_norm": 0.8937482133829812, + "learning_rate": 0.000231, + "loss": 9.3393, + "step": 77 + }, + { + "epoch": 0.00078, + "grad_norm": 0.886737139439046, + "learning_rate": 0.000234, + "loss": 9.3091, + "step": 78 + }, + { + "epoch": 0.00079, + "grad_norm": 0.8895258541637481, + "learning_rate": 0.00023700000000000001, + "loss": 9.2733, + "step": 79 + }, + { + "epoch": 0.0008, + "grad_norm": 0.8909958989870267, + "learning_rate": 0.00024, + "loss": 9.2384, + "step": 80 + }, + { + "epoch": 
0.00081, + "grad_norm": 0.8966003836256963, + "learning_rate": 0.00024300000000000002, + "loss": 9.2045, + "step": 81 + }, + { + "epoch": 0.00082, + "grad_norm": 0.9055894629552318, + "learning_rate": 0.000246, + "loss": 9.1791, + "step": 82 + }, + { + "epoch": 0.00083, + "grad_norm": 0.8961362604582432, + "learning_rate": 0.00024900000000000004, + "loss": 9.1431, + "step": 83 + }, + { + "epoch": 0.00084, + "grad_norm": 0.8980147614185676, + "learning_rate": 0.000252, + "loss": 9.1116, + "step": 84 + }, + { + "epoch": 0.00085, + "grad_norm": 0.8928314575907911, + "learning_rate": 0.000255, + "loss": 9.0884, + "step": 85 + }, + { + "epoch": 0.00086, + "grad_norm": 0.8950046881177521, + "learning_rate": 0.000258, + "loss": 9.0518, + "step": 86 + }, + { + "epoch": 0.00087, + "grad_norm": 0.8877891352831231, + "learning_rate": 0.000261, + "loss": 9.0297, + "step": 87 + }, + { + "epoch": 0.00088, + "grad_norm": 0.8929677780146621, + "learning_rate": 0.00026399999999999997, + "loss": 8.992, + "step": 88 + }, + { + "epoch": 0.00089, + "grad_norm": 0.8871031960638883, + "learning_rate": 0.000267, + "loss": 8.9663, + "step": 89 + }, + { + "epoch": 0.0009, + "grad_norm": 0.8863614322091727, + "learning_rate": 0.00027, + "loss": 8.9387, + "step": 90 + }, + { + "epoch": 0.00091, + "grad_norm": 0.8806256133072948, + "learning_rate": 0.000273, + "loss": 8.9083, + "step": 91 + }, + { + "epoch": 0.00092, + "grad_norm": 0.8826316050497074, + "learning_rate": 0.000276, + "loss": 8.8757, + "step": 92 + }, + { + "epoch": 0.00093, + "grad_norm": 0.8833603577684496, + "learning_rate": 0.000279, + "loss": 8.8461, + "step": 93 + }, + { + "epoch": 0.00094, + "grad_norm": 0.8819538724809766, + "learning_rate": 0.000282, + "loss": 8.82, + "step": 94 + }, + { + "epoch": 0.00095, + "grad_norm": 0.8776473753829749, + "learning_rate": 0.000285, + "loss": 8.7909, + "step": 95 + }, + { + "epoch": 0.00096, + "grad_norm": 0.8854898687433331, + "learning_rate": 0.000288, + "loss": 8.7608, + "step": 96 + }, + { + "epoch": 0.00097, + "grad_norm": 0.8763526561707659, + "learning_rate": 0.000291, + "loss": 8.7376, + "step": 97 + }, + { + "epoch": 0.00098, + "grad_norm": 0.8773720509513535, + "learning_rate": 0.000294, + "loss": 8.7001, + "step": 98 + }, + { + "epoch": 0.00099, + "grad_norm": 0.8782537783818637, + "learning_rate": 0.000297, + "loss": 8.6785, + "step": 99 + }, + { + "epoch": 0.001, + "grad_norm": 0.876084031734807, + "learning_rate": 0.00030000000000000003, + "loss": 8.6509, + "step": 100 + }, + { + "epoch": 0.00101, + "grad_norm": 0.8775511931302766, + "learning_rate": 0.00030300000000000005, + "loss": 8.6162, + "step": 101 + }, + { + "epoch": 0.00102, + "grad_norm": 0.8666072056658197, + "learning_rate": 0.000306, + "loss": 8.5962, + "step": 102 + }, + { + "epoch": 0.00103, + "grad_norm": 0.8733315070806934, + "learning_rate": 0.000309, + "loss": 8.5716, + "step": 103 + }, + { + "epoch": 0.00104, + "grad_norm": 0.8664419648151436, + "learning_rate": 0.000312, + "loss": 8.5503, + "step": 104 + }, + { + "epoch": 0.00105, + "grad_norm": 0.8699404252946213, + "learning_rate": 0.000315, + "loss": 8.5232, + "step": 105 + }, + { + "epoch": 0.00106, + "grad_norm": 0.8630101619311507, + "learning_rate": 0.000318, + "loss": 8.4946, + "step": 106 + }, + { + "epoch": 0.00107, + "grad_norm": 0.8533006978361278, + "learning_rate": 0.000321, + "loss": 8.4694, + "step": 107 + }, + { + "epoch": 0.00108, + "grad_norm": 0.8571194919918376, + "learning_rate": 0.000324, + "loss": 8.4408, + "step": 108 + }, + { + "epoch": 0.00109, + 
"grad_norm": 0.8496626885878062, + "learning_rate": 0.000327, + "loss": 8.4213, + "step": 109 + }, + { + "epoch": 0.0011, + "grad_norm": 0.8617458268479945, + "learning_rate": 0.00033, + "loss": 8.3989, + "step": 110 + }, + { + "epoch": 0.00111, + "grad_norm": 0.874730580725934, + "learning_rate": 0.000333, + "loss": 8.3705, + "step": 111 + }, + { + "epoch": 0.00112, + "grad_norm": 0.9211403811176949, + "learning_rate": 0.00033600000000000004, + "loss": 8.351, + "step": 112 + }, + { + "epoch": 0.00113, + "grad_norm": 0.9451163730301329, + "learning_rate": 0.000339, + "loss": 8.3126, + "step": 113 + }, + { + "epoch": 0.00114, + "grad_norm": 0.8518853453666535, + "learning_rate": 0.000342, + "loss": 8.3025, + "step": 114 + }, + { + "epoch": 0.00115, + "grad_norm": 0.8499246309464553, + "learning_rate": 0.00034500000000000004, + "loss": 8.2758, + "step": 115 + }, + { + "epoch": 0.00116, + "grad_norm": 0.8769128820472754, + "learning_rate": 0.000348, + "loss": 8.2536, + "step": 116 + }, + { + "epoch": 0.00117, + "grad_norm": 0.829578266212784, + "learning_rate": 0.000351, + "loss": 8.2211, + "step": 117 + }, + { + "epoch": 0.00118, + "grad_norm": 0.8587574762862499, + "learning_rate": 0.000354, + "loss": 8.2068, + "step": 118 + }, + { + "epoch": 0.00119, + "grad_norm": 0.8383808879241313, + "learning_rate": 0.000357, + "loss": 8.1942, + "step": 119 + }, + { + "epoch": 0.0012, + "grad_norm": 0.8155263912622424, + "learning_rate": 0.00035999999999999997, + "loss": 8.1675, + "step": 120 + }, + { + "epoch": 0.00121, + "grad_norm": 0.8344307084821188, + "learning_rate": 0.000363, + "loss": 8.1409, + "step": 121 + }, + { + "epoch": 0.00122, + "grad_norm": 0.8097993043330719, + "learning_rate": 0.000366, + "loss": 8.1244, + "step": 122 + }, + { + "epoch": 0.00123, + "grad_norm": 0.8029969793277704, + "learning_rate": 0.000369, + "loss": 8.102, + "step": 123 + }, + { + "epoch": 0.00124, + "grad_norm": 0.7829455528112805, + "learning_rate": 0.000372, + "loss": 8.0811, + "step": 124 + }, + { + "epoch": 0.00125, + "grad_norm": 0.8110394816024603, + "learning_rate": 0.000375, + "loss": 8.0581, + "step": 125 + }, + { + "epoch": 0.00126, + "grad_norm": 0.8039928825408066, + "learning_rate": 0.000378, + "loss": 8.0463, + "step": 126 + }, + { + "epoch": 0.00127, + "grad_norm": 0.8369020039958236, + "learning_rate": 0.000381, + "loss": 8.0243, + "step": 127 + }, + { + "epoch": 0.00128, + "grad_norm": 0.9124681746054819, + "learning_rate": 0.000384, + "loss": 8.0062, + "step": 128 + }, + { + "epoch": 0.00129, + "grad_norm": 0.9497790585671452, + "learning_rate": 0.00038700000000000003, + "loss": 7.997, + "step": 129 + }, + { + "epoch": 0.0013, + "grad_norm": 0.895034767303024, + "learning_rate": 0.00039000000000000005, + "loss": 7.9709, + "step": 130 + }, + { + "epoch": 0.00131, + "grad_norm": 0.7344919418773682, + "learning_rate": 0.000393, + "loss": 7.9348, + "step": 131 + }, + { + "epoch": 0.00132, + "grad_norm": 0.8078203789615136, + "learning_rate": 0.00039600000000000003, + "loss": 7.9285, + "step": 132 + }, + { + "epoch": 0.00133, + "grad_norm": 0.8012273324997825, + "learning_rate": 0.00039900000000000005, + "loss": 7.907, + "step": 133 + }, + { + "epoch": 0.00134, + "grad_norm": 0.7227755159346104, + "learning_rate": 0.000402, + "loss": 7.8939, + "step": 134 + }, + { + "epoch": 0.00135, + "grad_norm": 0.7235157354092677, + "learning_rate": 0.00040500000000000003, + "loss": 7.8661, + "step": 135 + }, + { + "epoch": 0.00136, + "grad_norm": 0.7822895950244824, + "learning_rate": 0.00040800000000000005, + 
"loss": 7.852, + "step": 136 + }, + { + "epoch": 0.00137, + "grad_norm": 0.7608062709985561, + "learning_rate": 0.000411, + "loss": 7.8315, + "step": 137 + }, + { + "epoch": 0.00138, + "grad_norm": 0.7380360973204948, + "learning_rate": 0.00041400000000000003, + "loss": 7.8088, + "step": 138 + }, + { + "epoch": 0.00139, + "grad_norm": 0.684851228306475, + "learning_rate": 0.00041700000000000005, + "loss": 7.7952, + "step": 139 + }, + { + "epoch": 0.0014, + "grad_norm": 0.693462889960616, + "learning_rate": 0.00042000000000000007, + "loss": 7.7794, + "step": 140 + }, + { + "epoch": 0.00141, + "grad_norm": 0.7360428489985282, + "learning_rate": 0.000423, + "loss": 7.7672, + "step": 141 + }, + { + "epoch": 0.00142, + "grad_norm": 0.8088792714181905, + "learning_rate": 0.00042599999999999995, + "loss": 7.7572, + "step": 142 + }, + { + "epoch": 0.00143, + "grad_norm": 0.8184868537412088, + "learning_rate": 0.00042899999999999997, + "loss": 7.7297, + "step": 143 + }, + { + "epoch": 0.00144, + "grad_norm": 0.7328054873489724, + "learning_rate": 0.000432, + "loss": 7.7108, + "step": 144 + }, + { + "epoch": 0.00145, + "grad_norm": 0.742383079085953, + "learning_rate": 0.000435, + "loss": 7.7067, + "step": 145 + }, + { + "epoch": 0.00146, + "grad_norm": 0.8017743593965694, + "learning_rate": 0.00043799999999999997, + "loss": 7.6737, + "step": 146 + }, + { + "epoch": 0.00147, + "grad_norm": 0.7202665590443219, + "learning_rate": 0.000441, + "loss": 7.6628, + "step": 147 + }, + { + "epoch": 0.00148, + "grad_norm": 0.5746342281678257, + "learning_rate": 0.000444, + "loss": 7.6477, + "step": 148 + }, + { + "epoch": 0.00149, + "grad_norm": 0.5929410187247641, + "learning_rate": 0.00044699999999999997, + "loss": 7.6424, + "step": 149 + }, + { + "epoch": 0.0015, + "grad_norm": 0.7151318764270516, + "learning_rate": 0.00045, + "loss": 7.6133, + "step": 150 + }, + { + "epoch": 0.00151, + "grad_norm": 0.6796514647913489, + "learning_rate": 0.000453, + "loss": 7.5947, + "step": 151 + }, + { + "epoch": 0.00152, + "grad_norm": 0.543758313944587, + "learning_rate": 0.000456, + "loss": 7.5852, + "step": 152 + }, + { + "epoch": 0.00153, + "grad_norm": 0.7299692440583161, + "learning_rate": 0.000459, + "loss": 7.5768, + "step": 153 + }, + { + "epoch": 0.00154, + "grad_norm": 0.8631376359970574, + "learning_rate": 0.000462, + "loss": 7.5644, + "step": 154 + }, + { + "epoch": 0.00155, + "grad_norm": 0.9628027146132815, + "learning_rate": 0.000465, + "loss": 7.546, + "step": 155 + }, + { + "epoch": 0.00156, + "grad_norm": 1.6485250632015214, + "learning_rate": 0.000468, + "loss": 7.5336, + "step": 156 + }, + { + "epoch": 0.00157, + "grad_norm": 0.9248857473926935, + "learning_rate": 0.000471, + "loss": 7.5216, + "step": 157 + }, + { + "epoch": 0.00158, + "grad_norm": 0.7465186792591986, + "learning_rate": 0.00047400000000000003, + "loss": 7.4981, + "step": 158 + }, + { + "epoch": 0.00159, + "grad_norm": 0.5902384572619932, + "learning_rate": 0.000477, + "loss": 7.4827, + "step": 159 + }, + { + "epoch": 0.0016, + "grad_norm": 0.799266704152554, + "learning_rate": 0.00048, + "loss": 7.4675, + "step": 160 + }, + { + "epoch": 0.00161, + "grad_norm": 0.7827143911710401, + "learning_rate": 0.00048300000000000003, + "loss": 7.4466, + "step": 161 + }, + { + "epoch": 0.00162, + "grad_norm": 0.9218921339316959, + "learning_rate": 0.00048600000000000005, + "loss": 7.4513, + "step": 162 + }, + { + "epoch": 0.00163, + "grad_norm": 0.7287660540574216, + "learning_rate": 0.0004890000000000001, + "loss": 7.4357, + "step": 163 + }, + { 
+ "epoch": 0.00164, + "grad_norm": 0.5250441215321361, + "learning_rate": 0.000492, + "loss": 7.4302, + "step": 164 + }, + { + "epoch": 0.00165, + "grad_norm": 0.741808200483857, + "learning_rate": 0.000495, + "loss": 7.4077, + "step": 165 + }, + { + "epoch": 0.00166, + "grad_norm": 0.6759515654254841, + "learning_rate": 0.0004980000000000001, + "loss": 7.3847, + "step": 166 + }, + { + "epoch": 0.00167, + "grad_norm": 0.5877266102377413, + "learning_rate": 0.000501, + "loss": 7.373, + "step": 167 + }, + { + "epoch": 0.00168, + "grad_norm": 0.571053515725499, + "learning_rate": 0.000504, + "loss": 7.3498, + "step": 168 + }, + { + "epoch": 0.00169, + "grad_norm": 0.6044099115482432, + "learning_rate": 0.0005070000000000001, + "loss": 7.3473, + "step": 169 + }, + { + "epoch": 0.0017, + "grad_norm": 0.46081682030640647, + "learning_rate": 0.00051, + "loss": 7.3262, + "step": 170 + }, + { + "epoch": 0.00171, + "grad_norm": 0.6775750897944629, + "learning_rate": 0.000513, + "loss": 7.3116, + "step": 171 + }, + { + "epoch": 0.00172, + "grad_norm": 0.57210862548929, + "learning_rate": 0.000516, + "loss": 7.3117, + "step": 172 + }, + { + "epoch": 0.00173, + "grad_norm": 0.5300582190464731, + "learning_rate": 0.0005189999999999999, + "loss": 7.2934, + "step": 173 + }, + { + "epoch": 0.00174, + "grad_norm": 0.7575900839335431, + "learning_rate": 0.000522, + "loss": 7.3114, + "step": 174 + }, + { + "epoch": 0.00175, + "grad_norm": 0.7613961222663432, + "learning_rate": 0.000525, + "loss": 7.2747, + "step": 175 + }, + { + "epoch": 0.00176, + "grad_norm": 0.6765759081090318, + "learning_rate": 0.0005279999999999999, + "loss": 7.2505, + "step": 176 + }, + { + "epoch": 0.00177, + "grad_norm": 0.8933831251648804, + "learning_rate": 0.000531, + "loss": 7.2576, + "step": 177 + }, + { + "epoch": 0.00178, + "grad_norm": 0.7076429855739662, + "learning_rate": 0.000534, + "loss": 7.2471, + "step": 178 + }, + { + "epoch": 0.00179, + "grad_norm": 0.49163795680938555, + "learning_rate": 0.000537, + "loss": 7.2319, + "step": 179 + }, + { + "epoch": 0.0018, + "grad_norm": 0.6284573091622804, + "learning_rate": 0.00054, + "loss": 7.2064, + "step": 180 + }, + { + "epoch": 0.00181, + "grad_norm": 0.5715580305485367, + "learning_rate": 0.000543, + "loss": 7.1941, + "step": 181 + }, + { + "epoch": 0.00182, + "grad_norm": 0.4282299340658738, + "learning_rate": 0.000546, + "loss": 7.1864, + "step": 182 + }, + { + "epoch": 0.00183, + "grad_norm": 0.5948913741099119, + "learning_rate": 0.000549, + "loss": 7.1781, + "step": 183 + }, + { + "epoch": 0.00184, + "grad_norm": 0.4755161712268706, + "learning_rate": 0.000552, + "loss": 7.1637, + "step": 184 + }, + { + "epoch": 0.00185, + "grad_norm": 0.46412280386502286, + "learning_rate": 0.000555, + "loss": 7.14, + "step": 185 + }, + { + "epoch": 0.00186, + "grad_norm": 0.526893652843914, + "learning_rate": 0.000558, + "loss": 7.1495, + "step": 186 + }, + { + "epoch": 0.00187, + "grad_norm": 0.46987960377579885, + "learning_rate": 0.000561, + "loss": 7.1236, + "step": 187 + }, + { + "epoch": 0.00188, + "grad_norm": 0.41993376006980737, + "learning_rate": 0.000564, + "loss": 7.1004, + "step": 188 + }, + { + "epoch": 0.00189, + "grad_norm": 0.5433001024887105, + "learning_rate": 0.000567, + "loss": 7.0986, + "step": 189 + }, + { + "epoch": 0.0019, + "grad_norm": 0.42284758794841465, + "learning_rate": 0.00057, + "loss": 7.0796, + "step": 190 + }, + { + "epoch": 0.00191, + "grad_norm": 0.47158596225286625, + "learning_rate": 0.000573, + "loss": 7.0701, + "step": 191 + }, + { + 
"epoch": 0.00192, + "grad_norm": 0.4590137866807191, + "learning_rate": 0.000576, + "loss": 7.0611, + "step": 192 + }, + { + "epoch": 0.00193, + "grad_norm": 0.43849327433792495, + "learning_rate": 0.000579, + "loss": 7.0612, + "step": 193 + }, + { + "epoch": 0.00194, + "grad_norm": 0.4326532382468588, + "learning_rate": 0.000582, + "loss": 7.0527, + "step": 194 + }, + { + "epoch": 0.00195, + "grad_norm": 0.508356146068285, + "learning_rate": 0.000585, + "loss": 7.0348, + "step": 195 + }, + { + "epoch": 0.00196, + "grad_norm": 0.534972126993015, + "learning_rate": 0.000588, + "loss": 7.0227, + "step": 196 + }, + { + "epoch": 0.00197, + "grad_norm": 0.693386583628206, + "learning_rate": 0.000591, + "loss": 7.0198, + "step": 197 + }, + { + "epoch": 0.00198, + "grad_norm": 1.4796610947894584, + "learning_rate": 0.000594, + "loss": 7.0311, + "step": 198 + }, + { + "epoch": 0.00199, + "grad_norm": 0.8596271717690976, + "learning_rate": 0.0005970000000000001, + "loss": 7.0019, + "step": 199 + }, + { + "epoch": 0.002, + "grad_norm": 0.5642900417004818, + "learning_rate": 0.0006000000000000001, + "loss": 6.991, + "step": 200 + }, + { + "epoch": 0.00201, + "grad_norm": 0.8692961874823039, + "learning_rate": 0.000603, + "loss": 6.9753, + "step": 201 + }, + { + "epoch": 0.00202, + "grad_norm": 0.6679575424248658, + "learning_rate": 0.0006060000000000001, + "loss": 6.9579, + "step": 202 + }, + { + "epoch": 0.00203, + "grad_norm": 0.8457647455673973, + "learning_rate": 0.0006090000000000001, + "loss": 6.9683, + "step": 203 + }, + { + "epoch": 0.00204, + "grad_norm": 0.9450482633193271, + "learning_rate": 0.000612, + "loss": 6.9556, + "step": 204 + }, + { + "epoch": 0.00205, + "grad_norm": 1.3090292735766231, + "learning_rate": 0.000615, + "loss": 6.9523, + "step": 205 + }, + { + "epoch": 0.00206, + "grad_norm": 0.6638095483572409, + "learning_rate": 0.000618, + "loss": 6.9303, + "step": 206 + }, + { + "epoch": 0.00207, + "grad_norm": 0.4840297186697793, + "learning_rate": 0.000621, + "loss": 6.9174, + "step": 207 + }, + { + "epoch": 0.00208, + "grad_norm": 0.7834386214905589, + "learning_rate": 0.000624, + "loss": 6.9192, + "step": 208 + }, + { + "epoch": 0.00209, + "grad_norm": 0.69675327719343, + "learning_rate": 0.000627, + "loss": 6.9018, + "step": 209 + }, + { + "epoch": 0.0021, + "grad_norm": 0.5517351762495105, + "learning_rate": 0.00063, + "loss": 6.8834, + "step": 210 + }, + { + "epoch": 0.00211, + "grad_norm": 0.6866030941755482, + "learning_rate": 0.000633, + "loss": 6.8831, + "step": 211 + }, + { + "epoch": 0.00212, + "grad_norm": 0.4815850149383259, + "learning_rate": 0.000636, + "loss": 6.8711, + "step": 212 + }, + { + "epoch": 0.00213, + "grad_norm": 0.5445114180165818, + "learning_rate": 0.000639, + "loss": 6.8513, + "step": 213 + }, + { + "epoch": 0.00214, + "grad_norm": 0.5893761744890885, + "learning_rate": 0.000642, + "loss": 6.8511, + "step": 214 + }, + { + "epoch": 0.00215, + "grad_norm": 0.3989068076407255, + "learning_rate": 0.000645, + "loss": 6.8428, + "step": 215 + }, + { + "epoch": 0.00216, + "grad_norm": 0.4674719209271809, + "learning_rate": 0.000648, + "loss": 6.8374, + "step": 216 + }, + { + "epoch": 0.00217, + "grad_norm": 0.556887374004828, + "learning_rate": 0.000651, + "loss": 6.8123, + "step": 217 + }, + { + "epoch": 0.00218, + "grad_norm": 0.5348764881519483, + "learning_rate": 0.000654, + "loss": 6.815, + "step": 218 + }, + { + "epoch": 0.00219, + "grad_norm": 0.6261358280484484, + "learning_rate": 0.000657, + "loss": 6.7906, + "step": 219 + }, + { + "epoch": 
0.0022, + "grad_norm": 0.5490386646627615, + "learning_rate": 0.00066, + "loss": 6.8058, + "step": 220 + }, + { + "epoch": 0.00221, + "grad_norm": 0.5297655781082383, + "learning_rate": 0.0006630000000000001, + "loss": 6.7838, + "step": 221 + }, + { + "epoch": 0.00222, + "grad_norm": 0.53116953133404, + "learning_rate": 0.000666, + "loss": 6.7711, + "step": 222 + }, + { + "epoch": 0.00223, + "grad_norm": 0.46985855580572156, + "learning_rate": 0.000669, + "loss": 6.7662, + "step": 223 + }, + { + "epoch": 0.00224, + "grad_norm": 0.48892819667849163, + "learning_rate": 0.0006720000000000001, + "loss": 6.7615, + "step": 224 + }, + { + "epoch": 0.00225, + "grad_norm": 0.5426443115029689, + "learning_rate": 0.000675, + "loss": 6.7505, + "step": 225 + }, + { + "epoch": 0.00226, + "grad_norm": 0.47341745143430014, + "learning_rate": 0.000678, + "loss": 6.7457, + "step": 226 + }, + { + "epoch": 0.00227, + "grad_norm": 0.47753897999990824, + "learning_rate": 0.0006810000000000001, + "loss": 6.7186, + "step": 227 + }, + { + "epoch": 0.00228, + "grad_norm": 0.43835516232945165, + "learning_rate": 0.000684, + "loss": 6.721, + "step": 228 + }, + { + "epoch": 0.00229, + "grad_norm": 0.3666587821660354, + "learning_rate": 0.000687, + "loss": 6.7162, + "step": 229 + }, + { + "epoch": 0.0023, + "grad_norm": 0.5954344273025705, + "learning_rate": 0.0006900000000000001, + "loss": 6.6971, + "step": 230 + }, + { + "epoch": 0.00231, + "grad_norm": 0.8324250780860898, + "learning_rate": 0.000693, + "loss": 6.6912, + "step": 231 + }, + { + "epoch": 0.00232, + "grad_norm": 1.1082992895496584, + "learning_rate": 0.000696, + "loss": 6.7117, + "step": 232 + }, + { + "epoch": 0.00233, + "grad_norm": 0.8989391942429391, + "learning_rate": 0.0006990000000000001, + "loss": 6.6931, + "step": 233 + }, + { + "epoch": 0.00234, + "grad_norm": 0.8501087453831264, + "learning_rate": 0.000702, + "loss": 6.6816, + "step": 234 + }, + { + "epoch": 0.00235, + "grad_norm": 0.9709457331919181, + "learning_rate": 0.000705, + "loss": 6.6715, + "step": 235 + }, + { + "epoch": 0.00236, + "grad_norm": 0.8996312948341649, + "learning_rate": 0.000708, + "loss": 6.6542, + "step": 236 + }, + { + "epoch": 0.00237, + "grad_norm": 0.7941572817187773, + "learning_rate": 0.0007109999999999999, + "loss": 6.6634, + "step": 237 + }, + { + "epoch": 0.00238, + "grad_norm": 0.649710293154646, + "learning_rate": 0.000714, + "loss": 6.6483, + "step": 238 + }, + { + "epoch": 0.00239, + "grad_norm": 0.7175873388046764, + "learning_rate": 0.000717, + "loss": 6.6317, + "step": 239 + }, + { + "epoch": 0.0024, + "grad_norm": 1.0726439429102004, + "learning_rate": 0.0007199999999999999, + "loss": 6.6535, + "step": 240 + }, + { + "epoch": 0.00241, + "grad_norm": 1.1551390926973517, + "learning_rate": 0.000723, + "loss": 6.6508, + "step": 241 + }, + { + "epoch": 0.00242, + "grad_norm": 0.8245355038796127, + "learning_rate": 0.000726, + "loss": 6.615, + "step": 242 + }, + { + "epoch": 0.00243, + "grad_norm": 0.7119399485811939, + "learning_rate": 0.000729, + "loss": 6.6026, + "step": 243 + }, + { + "epoch": 0.00244, + "grad_norm": 0.6396700306701443, + "learning_rate": 0.000732, + "loss": 6.6042, + "step": 244 + }, + { + "epoch": 0.00245, + "grad_norm": 0.668492143187707, + "learning_rate": 0.000735, + "loss": 6.5953, + "step": 245 + }, + { + "epoch": 0.00246, + "grad_norm": 0.6209038847600604, + "learning_rate": 0.000738, + "loss": 6.5759, + "step": 246 + }, + { + "epoch": 0.00247, + "grad_norm": 0.49470830317055475, + "learning_rate": 0.000741, + "loss": 6.5677, + 
"step": 247 + }, + { + "epoch": 0.00248, + "grad_norm": 0.5745764116827149, + "learning_rate": 0.000744, + "loss": 6.5775, + "step": 248 + }, + { + "epoch": 0.00249, + "grad_norm": 0.5319509172858093, + "learning_rate": 0.000747, + "loss": 6.5558, + "step": 249 + }, + { + "epoch": 0.0025, + "grad_norm": 0.482084360804442, + "learning_rate": 0.00075, + "loss": 6.556, + "step": 250 + }, + { + "epoch": 0.00251, + "grad_norm": 0.46516739275647623, + "learning_rate": 0.000753, + "loss": 6.521, + "step": 251 + }, + { + "epoch": 0.00252, + "grad_norm": 0.4629119115625355, + "learning_rate": 0.000756, + "loss": 6.531, + "step": 252 + }, + { + "epoch": 0.00253, + "grad_norm": 0.37719629506333596, + "learning_rate": 0.000759, + "loss": 6.519, + "step": 253 + }, + { + "epoch": 0.00254, + "grad_norm": 0.44323602664762185, + "learning_rate": 0.000762, + "loss": 6.5149, + "step": 254 + }, + { + "epoch": 0.00255, + "grad_norm": 0.38153047495099895, + "learning_rate": 0.0007650000000000001, + "loss": 6.5129, + "step": 255 + }, + { + "epoch": 0.00256, + "grad_norm": 0.5270908471121704, + "learning_rate": 0.000768, + "loss": 6.4934, + "step": 256 + }, + { + "epoch": 0.00257, + "grad_norm": 0.6201344591076082, + "learning_rate": 0.000771, + "loss": 6.4997, + "step": 257 + }, + { + "epoch": 0.00258, + "grad_norm": 0.6391276132887356, + "learning_rate": 0.0007740000000000001, + "loss": 6.477, + "step": 258 + }, + { + "epoch": 0.00259, + "grad_norm": 0.6374758421191778, + "learning_rate": 0.000777, + "loss": 6.4787, + "step": 259 + }, + { + "epoch": 0.0026, + "grad_norm": 0.5597091224464362, + "learning_rate": 0.0007800000000000001, + "loss": 6.4607, + "step": 260 + }, + { + "epoch": 0.00261, + "grad_norm": 0.587169694241395, + "learning_rate": 0.0007830000000000001, + "loss": 6.4722, + "step": 261 + }, + { + "epoch": 0.00262, + "grad_norm": 0.6112267949829847, + "learning_rate": 0.000786, + "loss": 6.4511, + "step": 262 + }, + { + "epoch": 0.00263, + "grad_norm": 0.5933922824160996, + "learning_rate": 0.0007890000000000001, + "loss": 6.4574, + "step": 263 + }, + { + "epoch": 0.00264, + "grad_norm": 0.6560299493456899, + "learning_rate": 0.0007920000000000001, + "loss": 6.4408, + "step": 264 + }, + { + "epoch": 0.00265, + "grad_norm": 0.9913628812090025, + "learning_rate": 0.000795, + "loss": 6.4422, + "step": 265 + }, + { + "epoch": 0.00266, + "grad_norm": 1.340981155098937, + "learning_rate": 0.0007980000000000001, + "loss": 6.4533, + "step": 266 + }, + { + "epoch": 0.00267, + "grad_norm": 0.8266116512325479, + "learning_rate": 0.0008010000000000001, + "loss": 6.4319, + "step": 267 + }, + { + "epoch": 0.00268, + "grad_norm": 0.9896228951890642, + "learning_rate": 0.000804, + "loss": 6.4378, + "step": 268 + }, + { + "epoch": 0.00269, + "grad_norm": 1.2352739008881923, + "learning_rate": 0.0008070000000000001, + "loss": 6.4279, + "step": 269 + }, + { + "epoch": 0.0027, + "grad_norm": 1.1652427209458782, + "learning_rate": 0.0008100000000000001, + "loss": 6.4326, + "step": 270 + }, + { + "epoch": 0.00271, + "grad_norm": 1.0407181933539849, + "learning_rate": 0.000813, + "loss": 6.4319, + "step": 271 + }, + { + "epoch": 0.00272, + "grad_norm": 0.8880696455452757, + "learning_rate": 0.0008160000000000001, + "loss": 6.4138, + "step": 272 + }, + { + "epoch": 0.00273, + "grad_norm": 0.8477724135782442, + "learning_rate": 0.0008190000000000001, + "loss": 6.404, + "step": 273 + }, + { + "epoch": 0.00274, + "grad_norm": 0.7818547901656048, + "learning_rate": 0.000822, + "loss": 6.383, + "step": 274 + }, + { + "epoch": 
0.00275, + "grad_norm": 0.6915610404761925, + "learning_rate": 0.0008250000000000001, + "loss": 6.3888, + "step": 275 + }, + { + "epoch": 0.00276, + "grad_norm": 0.6369714396426732, + "learning_rate": 0.0008280000000000001, + "loss": 6.3775, + "step": 276 + }, + { + "epoch": 0.00277, + "grad_norm": 0.6792843462530734, + "learning_rate": 0.0008310000000000001, + "loss": 6.3726, + "step": 277 + }, + { + "epoch": 0.00278, + "grad_norm": 0.6716653191335978, + "learning_rate": 0.0008340000000000001, + "loss": 6.3561, + "step": 278 + }, + { + "epoch": 0.00279, + "grad_norm": 0.6104270551210891, + "learning_rate": 0.0008370000000000001, + "loss": 6.3562, + "step": 279 + }, + { + "epoch": 0.0028, + "grad_norm": 0.5327216367370322, + "learning_rate": 0.0008400000000000001, + "loss": 6.3379, + "step": 280 + }, + { + "epoch": 0.00281, + "grad_norm": 0.4495801132850456, + "learning_rate": 0.0008430000000000001, + "loss": 6.3253, + "step": 281 + }, + { + "epoch": 0.00282, + "grad_norm": 0.4185635012011635, + "learning_rate": 0.000846, + "loss": 6.3254, + "step": 282 + }, + { + "epoch": 0.00283, + "grad_norm": 0.41306707794715253, + "learning_rate": 0.0008489999999999999, + "loss": 6.3154, + "step": 283 + }, + { + "epoch": 0.00284, + "grad_norm": 0.447351018324713, + "learning_rate": 0.0008519999999999999, + "loss": 6.3075, + "step": 284 + }, + { + "epoch": 0.00285, + "grad_norm": 0.4656314656211844, + "learning_rate": 0.000855, + "loss": 6.3102, + "step": 285 + }, + { + "epoch": 0.00286, + "grad_norm": 0.5287748664566101, + "learning_rate": 0.0008579999999999999, + "loss": 6.2881, + "step": 286 + }, + { + "epoch": 0.00287, + "grad_norm": 0.60454227039484, + "learning_rate": 0.000861, + "loss": 6.2937, + "step": 287 + }, + { + "epoch": 0.00288, + "grad_norm": 0.6409086244349441, + "learning_rate": 0.000864, + "loss": 6.2743, + "step": 288 + }, + { + "epoch": 0.00289, + "grad_norm": 0.7540915605033448, + "learning_rate": 0.0008669999999999999, + "loss": 6.2909, + "step": 289 + }, + { + "epoch": 0.0029, + "grad_norm": 0.9532532853232767, + "learning_rate": 0.00087, + "loss": 6.2777, + "step": 290 + }, + { + "epoch": 0.00291, + "grad_norm": 0.9297633606905631, + "learning_rate": 0.000873, + "loss": 6.278, + "step": 291 + }, + { + "epoch": 0.00292, + "grad_norm": 0.6544361038243887, + "learning_rate": 0.0008759999999999999, + "loss": 6.2635, + "step": 292 + }, + { + "epoch": 0.00293, + "grad_norm": 0.9586546136156446, + "learning_rate": 0.000879, + "loss": 6.2582, + "step": 293 + }, + { + "epoch": 0.00294, + "grad_norm": 0.8674924960686783, + "learning_rate": 0.000882, + "loss": 6.2684, + "step": 294 + }, + { + "epoch": 0.00295, + "grad_norm": 0.8596325280201164, + "learning_rate": 0.0008849999999999999, + "loss": 6.2363, + "step": 295 + }, + { + "epoch": 0.00296, + "grad_norm": 0.9927641151458286, + "learning_rate": 0.000888, + "loss": 6.2541, + "step": 296 + }, + { + "epoch": 0.00297, + "grad_norm": 1.342485766358639, + "learning_rate": 0.000891, + "loss": 6.2408, + "step": 297 + }, + { + "epoch": 0.00298, + "grad_norm": 1.1878316085061287, + "learning_rate": 0.0008939999999999999, + "loss": 6.2537, + "step": 298 + }, + { + "epoch": 0.00299, + "grad_norm": 0.9496422749623654, + "learning_rate": 0.000897, + "loss": 6.2242, + "step": 299 + }, + { + "epoch": 0.003, + "grad_norm": 1.122193003605518, + "learning_rate": 0.0009, + "loss": 6.2361, + "step": 300 + }, + { + "epoch": 0.00301, + "grad_norm": 1.129970594986655, + "learning_rate": 0.0009029999999999999, + "loss": 6.2273, + "step": 301 + }, + { + 
"epoch": 0.00302, + "grad_norm": 1.0740447263196922, + "learning_rate": 0.000906, + "loss": 6.2071, + "step": 302 + }, + { + "epoch": 0.00303, + "grad_norm": 1.1900410452977912, + "learning_rate": 0.000909, + "loss": 6.2313, + "step": 303 + }, + { + "epoch": 0.00304, + "grad_norm": 0.804691464481299, + "learning_rate": 0.000912, + "loss": 6.2111, + "step": 304 + }, + { + "epoch": 0.00305, + "grad_norm": 0.7167209084416579, + "learning_rate": 0.000915, + "loss": 6.2106, + "step": 305 + }, + { + "epoch": 0.00306, + "grad_norm": 0.5686498260282739, + "learning_rate": 0.000918, + "loss": 6.1897, + "step": 306 + }, + { + "epoch": 0.00307, + "grad_norm": 0.5740516870647188, + "learning_rate": 0.000921, + "loss": 6.1847, + "step": 307 + }, + { + "epoch": 0.00308, + "grad_norm": 0.5214022662741855, + "learning_rate": 0.000924, + "loss": 6.1668, + "step": 308 + }, + { + "epoch": 0.00309, + "grad_norm": 0.489157506496739, + "learning_rate": 0.000927, + "loss": 6.1798, + "step": 309 + }, + { + "epoch": 0.0031, + "grad_norm": 0.4872945232166538, + "learning_rate": 0.00093, + "loss": 6.1622, + "step": 310 + }, + { + "epoch": 0.00311, + "grad_norm": 0.4909949625440354, + "learning_rate": 0.000933, + "loss": 6.1533, + "step": 311 + }, + { + "epoch": 0.00312, + "grad_norm": 0.4186129744309998, + "learning_rate": 0.000936, + "loss": 6.1314, + "step": 312 + }, + { + "epoch": 0.00313, + "grad_norm": 0.36050967020968366, + "learning_rate": 0.0009390000000000001, + "loss": 6.1442, + "step": 313 + }, + { + "epoch": 0.00314, + "grad_norm": 0.3818285660239077, + "learning_rate": 0.000942, + "loss": 6.1495, + "step": 314 + }, + { + "epoch": 0.00315, + "grad_norm": 0.42967169925093956, + "learning_rate": 0.000945, + "loss": 6.126, + "step": 315 + }, + { + "epoch": 0.00316, + "grad_norm": 0.46511434454587514, + "learning_rate": 0.0009480000000000001, + "loss": 6.1004, + "step": 316 + }, + { + "epoch": 0.00317, + "grad_norm": 0.5237888199450732, + "learning_rate": 0.000951, + "loss": 6.1201, + "step": 317 + }, + { + "epoch": 0.00318, + "grad_norm": 0.6610672935792641, + "learning_rate": 0.000954, + "loss": 6.1161, + "step": 318 + }, + { + "epoch": 0.00319, + "grad_norm": 0.9099491192879063, + "learning_rate": 0.0009570000000000001, + "loss": 6.1122, + "step": 319 + }, + { + "epoch": 0.0032, + "grad_norm": 1.0329714723925014, + "learning_rate": 0.00096, + "loss": 6.1198, + "step": 320 + }, + { + "epoch": 0.00321, + "grad_norm": 0.9944509511152075, + "learning_rate": 0.000963, + "loss": 6.1077, + "step": 321 + }, + { + "epoch": 0.00322, + "grad_norm": 1.41191394849347, + "learning_rate": 0.0009660000000000001, + "loss": 6.1101, + "step": 322 + }, + { + "epoch": 0.00323, + "grad_norm": 0.9172707652477707, + "learning_rate": 0.000969, + "loss": 6.097, + "step": 323 + }, + { + "epoch": 0.00324, + "grad_norm": 1.1261423941310122, + "learning_rate": 0.0009720000000000001, + "loss": 6.1132, + "step": 324 + }, + { + "epoch": 0.00325, + "grad_norm": 0.6623590740718236, + "learning_rate": 0.0009750000000000001, + "loss": 6.0626, + "step": 325 + }, + { + "epoch": 0.00326, + "grad_norm": 0.7364273563271467, + "learning_rate": 0.0009780000000000001, + "loss": 6.0809, + "step": 326 + }, + { + "epoch": 0.00327, + "grad_norm": 0.8106867198528734, + "learning_rate": 0.000981, + "loss": 6.0853, + "step": 327 + }, + { + "epoch": 0.00328, + "grad_norm": 0.9008187294951384, + "learning_rate": 0.000984, + "loss": 6.0637, + "step": 328 + }, + { + "epoch": 0.00329, + "grad_norm": 1.0311811602663732, + "learning_rate": 0.000987, + "loss": 
6.0736, + "step": 329 + }, + { + "epoch": 0.0033, + "grad_norm": 0.7413155368855245, + "learning_rate": 0.00099, + "loss": 6.0572, + "step": 330 + }, + { + "epoch": 0.00331, + "grad_norm": 0.6745658849207387, + "learning_rate": 0.0009930000000000002, + "loss": 6.0599, + "step": 331 + }, + { + "epoch": 0.00332, + "grad_norm": 0.5913240343902441, + "learning_rate": 0.0009960000000000001, + "loss": 6.041, + "step": 332 + }, + { + "epoch": 0.00333, + "grad_norm": 0.5668749800176679, + "learning_rate": 0.000999, + "loss": 6.025, + "step": 333 + }, + { + "epoch": 0.00334, + "grad_norm": 0.5007608052342689, + "learning_rate": 0.001002, + "loss": 6.0336, + "step": 334 + }, + { + "epoch": 0.00335, + "grad_norm": 0.3983861566645405, + "learning_rate": 0.001005, + "loss": 6.0284, + "step": 335 + }, + { + "epoch": 0.00336, + "grad_norm": 0.4274260388302738, + "learning_rate": 0.001008, + "loss": 6.0181, + "step": 336 + }, + { + "epoch": 0.00337, + "grad_norm": 0.5335498119421307, + "learning_rate": 0.0010110000000000002, + "loss": 6.0004, + "step": 337 + }, + { + "epoch": 0.00338, + "grad_norm": 0.4831054617031532, + "learning_rate": 0.0010140000000000001, + "loss": 6.0126, + "step": 338 + }, + { + "epoch": 0.00339, + "grad_norm": 0.5437142240239534, + "learning_rate": 0.0010170000000000001, + "loss": 6.0197, + "step": 339 + }, + { + "epoch": 0.0034, + "grad_norm": 0.5265472037464025, + "learning_rate": 0.00102, + "loss": 5.9884, + "step": 340 + }, + { + "epoch": 0.00341, + "grad_norm": 0.4912689865797111, + "learning_rate": 0.001023, + "loss": 5.9692, + "step": 341 + }, + { + "epoch": 0.00342, + "grad_norm": 0.42997171211054086, + "learning_rate": 0.001026, + "loss": 5.974, + "step": 342 + }, + { + "epoch": 0.00343, + "grad_norm": 0.5197303002983154, + "learning_rate": 0.0010290000000000002, + "loss": 5.9763, + "step": 343 + }, + { + "epoch": 0.00344, + "grad_norm": 0.8117900330313431, + "learning_rate": 0.001032, + "loss": 5.9747, + "step": 344 + }, + { + "epoch": 0.00345, + "grad_norm": 1.1753813945983669, + "learning_rate": 0.001035, + "loss": 5.9788, + "step": 345 + }, + { + "epoch": 0.00346, + "grad_norm": 0.8135676081857764, + "learning_rate": 0.0010379999999999999, + "loss": 5.9748, + "step": 346 + }, + { + "epoch": 0.00347, + "grad_norm": 1.0182912247404574, + "learning_rate": 0.001041, + "loss": 5.9557, + "step": 347 + }, + { + "epoch": 0.00348, + "grad_norm": 1.1407576555241683, + "learning_rate": 0.001044, + "loss": 5.978, + "step": 348 + }, + { + "epoch": 0.00349, + "grad_norm": 0.7853068136249622, + "learning_rate": 0.001047, + "loss": 5.9412, + "step": 349 + }, + { + "epoch": 0.0035, + "grad_norm": 1.0427704318540805, + "learning_rate": 0.00105, + "loss": 5.9779, + "step": 350 + }, + { + "epoch": 0.00351, + "grad_norm": 0.8821399606009466, + "learning_rate": 0.001053, + "loss": 5.9701, + "step": 351 + }, + { + "epoch": 0.00352, + "grad_norm": 0.9582157894617032, + "learning_rate": 0.0010559999999999999, + "loss": 5.955, + "step": 352 + }, + { + "epoch": 0.00353, + "grad_norm": 1.0526665256553966, + "learning_rate": 0.001059, + "loss": 5.958, + "step": 353 + }, + { + "epoch": 0.00354, + "grad_norm": 1.045275747166985, + "learning_rate": 0.001062, + "loss": 5.9353, + "step": 354 + }, + { + "epoch": 0.00355, + "grad_norm": 1.1505195376317356, + "learning_rate": 0.001065, + "loss": 5.9542, + "step": 355 + }, + { + "epoch": 0.00356, + "grad_norm": 1.0355197503433216, + "learning_rate": 0.001068, + "loss": 5.9425, + "step": 356 + }, + { + "epoch": 0.00357, + "grad_norm": 1.127934795973434, + 
"learning_rate": 0.001071, + "loss": 5.9396, + "step": 357 + }, + { + "epoch": 0.00358, + "grad_norm": 0.7430425972675007, + "learning_rate": 0.001074, + "loss": 5.9201, + "step": 358 + }, + { + "epoch": 0.00359, + "grad_norm": 0.6597065121039739, + "learning_rate": 0.001077, + "loss": 5.9099, + "step": 359 + }, + { + "epoch": 0.0036, + "grad_norm": 0.6034653307534226, + "learning_rate": 0.00108, + "loss": 5.9081, + "step": 360 + }, + { + "epoch": 0.00361, + "grad_norm": 0.5960018023982208, + "learning_rate": 0.001083, + "loss": 5.9047, + "step": 361 + }, + { + "epoch": 0.00362, + "grad_norm": 0.4563634882449727, + "learning_rate": 0.001086, + "loss": 5.884, + "step": 362 + }, + { + "epoch": 0.00363, + "grad_norm": 0.49274399902142996, + "learning_rate": 0.001089, + "loss": 5.8759, + "step": 363 + }, + { + "epoch": 0.00364, + "grad_norm": 0.4937234603270663, + "learning_rate": 0.001092, + "loss": 5.8901, + "step": 364 + }, + { + "epoch": 0.00365, + "grad_norm": 0.5102012627619638, + "learning_rate": 0.001095, + "loss": 5.888, + "step": 365 + }, + { + "epoch": 0.00366, + "grad_norm": 0.4676595798467989, + "learning_rate": 0.001098, + "loss": 5.862, + "step": 366 + }, + { + "epoch": 0.00367, + "grad_norm": 0.49526135096535867, + "learning_rate": 0.001101, + "loss": 5.8667, + "step": 367 + }, + { + "epoch": 0.00368, + "grad_norm": 0.47887378181150303, + "learning_rate": 0.001104, + "loss": 5.8643, + "step": 368 + }, + { + "epoch": 0.00369, + "grad_norm": 0.48887117156741833, + "learning_rate": 0.001107, + "loss": 5.8686, + "step": 369 + }, + { + "epoch": 0.0037, + "grad_norm": 0.4473709149836047, + "learning_rate": 0.00111, + "loss": 5.8472, + "step": 370 + }, + { + "epoch": 0.00371, + "grad_norm": 0.38589559577094035, + "learning_rate": 0.001113, + "loss": 5.8158, + "step": 371 + }, + { + "epoch": 0.00372, + "grad_norm": 0.3912315505838062, + "learning_rate": 0.001116, + "loss": 5.8379, + "step": 372 + }, + { + "epoch": 0.00373, + "grad_norm": 0.38616823047071297, + "learning_rate": 0.001119, + "loss": 5.8267, + "step": 373 + }, + { + "epoch": 0.00374, + "grad_norm": 0.45854090440574513, + "learning_rate": 0.001122, + "loss": 5.8316, + "step": 374 + }, + { + "epoch": 0.00375, + "grad_norm": 0.5169440196993219, + "learning_rate": 0.0011250000000000001, + "loss": 5.8332, + "step": 375 + }, + { + "epoch": 0.00376, + "grad_norm": 0.5067806568705457, + "learning_rate": 0.001128, + "loss": 5.8287, + "step": 376 + }, + { + "epoch": 0.00377, + "grad_norm": 0.48558945502532774, + "learning_rate": 0.001131, + "loss": 5.8236, + "step": 377 + }, + { + "epoch": 0.00378, + "grad_norm": 0.47384141098896654, + "learning_rate": 0.001134, + "loss": 5.8187, + "step": 378 + }, + { + "epoch": 0.00379, + "grad_norm": 0.5705731390544022, + "learning_rate": 0.001137, + "loss": 5.8065, + "step": 379 + }, + { + "epoch": 0.0038, + "grad_norm": 0.8415616570321116, + "learning_rate": 0.00114, + "loss": 5.8323, + "step": 380 + }, + { + "epoch": 0.00381, + "grad_norm": 1.152388235651458, + "learning_rate": 0.0011430000000000001, + "loss": 5.8155, + "step": 381 + }, + { + "epoch": 0.00382, + "grad_norm": 0.7784536663385624, + "learning_rate": 0.001146, + "loss": 5.7896, + "step": 382 + }, + { + "epoch": 0.00383, + "grad_norm": 1.2096458575940454, + "learning_rate": 0.001149, + "loss": 5.8132, + "step": 383 + }, + { + "epoch": 0.00384, + "grad_norm": 1.2032626959449177, + "learning_rate": 0.001152, + "loss": 5.8295, + "step": 384 + }, + { + "epoch": 0.00385, + "grad_norm": 1.2258405640081835, + "learning_rate": 0.001155, + 
"loss": 5.8193, + "step": 385 + }, + { + "epoch": 0.00386, + "grad_norm": 1.060557976067675, + "learning_rate": 0.001158, + "loss": 5.8182, + "step": 386 + }, + { + "epoch": 0.00387, + "grad_norm": 1.6852101829047932, + "learning_rate": 0.0011610000000000001, + "loss": 5.8306, + "step": 387 + }, + { + "epoch": 0.00388, + "grad_norm": 0.7125426173667109, + "learning_rate": 0.001164, + "loss": 5.7875, + "step": 388 + }, + { + "epoch": 0.00389, + "grad_norm": 0.9333298966305301, + "learning_rate": 0.001167, + "loss": 5.8092, + "step": 389 + }, + { + "epoch": 0.0039, + "grad_norm": 0.7871116791575423, + "learning_rate": 0.00117, + "loss": 5.7842, + "step": 390 + }, + { + "epoch": 0.00391, + "grad_norm": 0.9033950769229127, + "learning_rate": 0.001173, + "loss": 5.7945, + "step": 391 + }, + { + "epoch": 0.00392, + "grad_norm": 1.0985861295177402, + "learning_rate": 0.001176, + "loss": 5.8091, + "step": 392 + }, + { + "epoch": 0.00393, + "grad_norm": 0.9893983760666882, + "learning_rate": 0.0011790000000000001, + "loss": 5.787, + "step": 393 + }, + { + "epoch": 0.00394, + "grad_norm": 1.0087630537900902, + "learning_rate": 0.001182, + "loss": 5.7718, + "step": 394 + }, + { + "epoch": 0.00395, + "grad_norm": 0.9357634093540522, + "learning_rate": 0.001185, + "loss": 5.7577, + "step": 395 + }, + { + "epoch": 0.00396, + "grad_norm": 0.8613606742928634, + "learning_rate": 0.001188, + "loss": 5.7674, + "step": 396 + }, + { + "epoch": 0.00397, + "grad_norm": 0.9393680367248612, + "learning_rate": 0.001191, + "loss": 5.7666, + "step": 397 + }, + { + "epoch": 0.00398, + "grad_norm": 0.8380984764873387, + "learning_rate": 0.0011940000000000002, + "loss": 5.7669, + "step": 398 + }, + { + "epoch": 0.00399, + "grad_norm": 0.7495495962771003, + "learning_rate": 0.0011970000000000001, + "loss": 5.7689, + "step": 399 + }, + { + "epoch": 0.004, + "grad_norm": 0.6237821646680863, + "learning_rate": 0.0012000000000000001, + "loss": 5.751, + "step": 400 + }, + { + "epoch": 0.00401, + "grad_norm": 0.6042562364668606, + "learning_rate": 0.001203, + "loss": 5.7286, + "step": 401 + }, + { + "epoch": 0.00402, + "grad_norm": 0.6800421237430357, + "learning_rate": 0.001206, + "loss": 5.7387, + "step": 402 + }, + { + "epoch": 0.00403, + "grad_norm": 0.5349967773183291, + "learning_rate": 0.001209, + "loss": 5.7296, + "step": 403 + }, + { + "epoch": 0.00404, + "grad_norm": 0.4491885962138907, + "learning_rate": 0.0012120000000000002, + "loss": 5.7204, + "step": 404 + }, + { + "epoch": 0.00405, + "grad_norm": 0.4231295613571548, + "learning_rate": 0.0012150000000000002, + "loss": 5.7133, + "step": 405 + }, + { + "epoch": 0.00406, + "grad_norm": 0.4344671315280792, + "learning_rate": 0.0012180000000000001, + "loss": 5.7007, + "step": 406 + }, + { + "epoch": 0.00407, + "grad_norm": 0.3917681066216858, + "learning_rate": 0.0012209999999999999, + "loss": 5.6947, + "step": 407 + }, + { + "epoch": 0.00408, + "grad_norm": 0.3526446903520604, + "learning_rate": 0.001224, + "loss": 5.7113, + "step": 408 + }, + { + "epoch": 0.00409, + "grad_norm": 0.3601774169994176, + "learning_rate": 0.001227, + "loss": 5.689, + "step": 409 + }, + { + "epoch": 0.0041, + "grad_norm": 0.3999316895065895, + "learning_rate": 0.00123, + "loss": 5.6821, + "step": 410 + }, + { + "epoch": 0.00411, + "grad_norm": 0.5215468120681382, + "learning_rate": 0.001233, + "loss": 5.6975, + "step": 411 + }, + { + "epoch": 0.00412, + "grad_norm": 0.8731356141140694, + "learning_rate": 0.001236, + "loss": 5.697, + "step": 412 + }, + { + "epoch": 0.00413, + "grad_norm": 
1.2920248463477522, + "learning_rate": 0.0012389999999999999, + "loss": 5.7158, + "step": 413 + }, + { + "epoch": 0.00414, + "grad_norm": 0.7474803494460109, + "learning_rate": 0.001242, + "loss": 5.6771, + "step": 414 + }, + { + "epoch": 0.00415, + "grad_norm": 0.9736431117993121, + "learning_rate": 0.001245, + "loss": 5.6888, + "step": 415 + }, + { + "epoch": 0.00416, + "grad_norm": 0.8653333577780613, + "learning_rate": 0.001248, + "loss": 5.6728, + "step": 416 + }, + { + "epoch": 0.00417, + "grad_norm": 0.6891363999339204, + "learning_rate": 0.001251, + "loss": 5.695, + "step": 417 + }, + { + "epoch": 0.00418, + "grad_norm": 0.7955125411502495, + "learning_rate": 0.001254, + "loss": 5.6734, + "step": 418 + }, + { + "epoch": 0.00419, + "grad_norm": 0.8034523576562718, + "learning_rate": 0.0012569999999999999, + "loss": 5.6601, + "step": 419 + }, + { + "epoch": 0.0042, + "grad_norm": 0.7731586474207807, + "learning_rate": 0.00126, + "loss": 5.6748, + "step": 420 + }, + { + "epoch": 0.00421, + "grad_norm": 0.747486262420627, + "learning_rate": 0.001263, + "loss": 5.6666, + "step": 421 + }, + { + "epoch": 0.00422, + "grad_norm": 0.7917981329409665, + "learning_rate": 0.001266, + "loss": 5.6544, + "step": 422 + }, + { + "epoch": 0.00423, + "grad_norm": 1.0889555078416353, + "learning_rate": 0.001269, + "loss": 5.6655, + "step": 423 + }, + { + "epoch": 0.00424, + "grad_norm": 0.9654337501414605, + "learning_rate": 0.001272, + "loss": 5.6614, + "step": 424 + }, + { + "epoch": 0.00425, + "grad_norm": 0.9055610792467201, + "learning_rate": 0.001275, + "loss": 5.6624, + "step": 425 + }, + { + "epoch": 0.00426, + "grad_norm": 0.8212981627676188, + "learning_rate": 0.001278, + "loss": 5.6637, + "step": 426 + }, + { + "epoch": 0.00427, + "grad_norm": 0.7602647353125763, + "learning_rate": 0.001281, + "loss": 5.6467, + "step": 427 + }, + { + "epoch": 0.00428, + "grad_norm": 0.677777266675102, + "learning_rate": 0.001284, + "loss": 5.6204, + "step": 428 + }, + { + "epoch": 0.00429, + "grad_norm": 0.5947091658499406, + "learning_rate": 0.001287, + "loss": 5.6311, + "step": 429 + }, + { + "epoch": 0.0043, + "grad_norm": 0.6377204770277832, + "learning_rate": 0.00129, + "loss": 5.6309, + "step": 430 + }, + { + "epoch": 0.00431, + "grad_norm": 0.6897719280155576, + "learning_rate": 0.001293, + "loss": 5.6193, + "step": 431 + }, + { + "epoch": 0.00432, + "grad_norm": 0.6884919593361081, + "learning_rate": 0.001296, + "loss": 5.6258, + "step": 432 + }, + { + "epoch": 0.00433, + "grad_norm": 0.6913913571918432, + "learning_rate": 0.001299, + "loss": 5.6177, + "step": 433 + }, + { + "epoch": 0.00434, + "grad_norm": 0.7261280979587743, + "learning_rate": 0.001302, + "loss": 5.6176, + "step": 434 + }, + { + "epoch": 0.00435, + "grad_norm": 0.8547702731757605, + "learning_rate": 0.001305, + "loss": 5.6162, + "step": 435 + }, + { + "epoch": 0.00436, + "grad_norm": 0.9457491419795808, + "learning_rate": 0.001308, + "loss": 5.5986, + "step": 436 + }, + { + "epoch": 0.00437, + "grad_norm": 0.9092672289397813, + "learning_rate": 0.001311, + "loss": 5.6144, + "step": 437 + }, + { + "epoch": 0.00438, + "grad_norm": 0.9049337850080227, + "learning_rate": 0.001314, + "loss": 5.6026, + "step": 438 + }, + { + "epoch": 0.00439, + "grad_norm": 0.7237349559204094, + "learning_rate": 0.001317, + "loss": 5.622, + "step": 439 + }, + { + "epoch": 0.0044, + "grad_norm": 0.8693791239531735, + "learning_rate": 0.00132, + "loss": 5.6008, + "step": 440 + }, + { + "epoch": 0.00441, + "grad_norm": 0.8508838859779835, + "learning_rate": 
0.001323, + "loss": 5.5985, + "step": 441 + }, + { + "epoch": 0.00442, + "grad_norm": 0.6987140204651114, + "learning_rate": 0.0013260000000000001, + "loss": 5.587, + "step": 442 + }, + { + "epoch": 0.00443, + "grad_norm": 0.5719525863328404, + "learning_rate": 0.001329, + "loss": 5.5843, + "step": 443 + }, + { + "epoch": 0.00444, + "grad_norm": 0.5407139364493208, + "learning_rate": 0.001332, + "loss": 5.5841, + "step": 444 + }, + { + "epoch": 0.00445, + "grad_norm": 0.5170533332401992, + "learning_rate": 0.001335, + "loss": 5.5667, + "step": 445 + }, + { + "epoch": 0.00446, + "grad_norm": 0.43806698904849195, + "learning_rate": 0.001338, + "loss": 5.5666, + "step": 446 + }, + { + "epoch": 0.00447, + "grad_norm": 0.49048920433285326, + "learning_rate": 0.001341, + "loss": 5.5671, + "step": 447 + }, + { + "epoch": 0.00448, + "grad_norm": 0.46215050883864656, + "learning_rate": 0.0013440000000000001, + "loss": 5.5475, + "step": 448 + }, + { + "epoch": 0.00449, + "grad_norm": 0.5259389714982564, + "learning_rate": 0.001347, + "loss": 5.5523, + "step": 449 + }, + { + "epoch": 0.0045, + "grad_norm": 0.6261840891481112, + "learning_rate": 0.00135, + "loss": 5.5542, + "step": 450 + }, + { + "epoch": 0.00451, + "grad_norm": 0.659112753346069, + "learning_rate": 0.001353, + "loss": 5.5431, + "step": 451 + }, + { + "epoch": 0.00452, + "grad_norm": 0.6800042712218282, + "learning_rate": 0.001356, + "loss": 5.5311, + "step": 452 + }, + { + "epoch": 0.00453, + "grad_norm": 0.5745322110996829, + "learning_rate": 0.001359, + "loss": 5.5188, + "step": 453 + }, + { + "epoch": 0.00454, + "grad_norm": 0.5230006416449293, + "learning_rate": 0.0013620000000000001, + "loss": 5.5319, + "step": 454 + }, + { + "epoch": 0.00455, + "grad_norm": 0.5858648520183006, + "learning_rate": 0.0013650000000000001, + "loss": 5.5314, + "step": 455 + }, + { + "epoch": 0.00456, + "grad_norm": 0.5800568870191161, + "learning_rate": 0.001368, + "loss": 5.5269, + "step": 456 + }, + { + "epoch": 0.00457, + "grad_norm": 0.5535894536098482, + "learning_rate": 0.001371, + "loss": 5.5162, + "step": 457 + }, + { + "epoch": 0.00458, + "grad_norm": 0.56095957103827, + "learning_rate": 0.001374, + "loss": 5.5224, + "step": 458 + }, + { + "epoch": 0.00459, + "grad_norm": 0.7492551931077938, + "learning_rate": 0.0013770000000000002, + "loss": 5.517, + "step": 459 + }, + { + "epoch": 0.0046, + "grad_norm": 0.8251083608050601, + "learning_rate": 0.0013800000000000002, + "loss": 5.5084, + "step": 460 + }, + { + "epoch": 0.00461, + "grad_norm": 0.7810512714683711, + "learning_rate": 0.0013830000000000001, + "loss": 5.5038, + "step": 461 + }, + { + "epoch": 0.00462, + "grad_norm": 0.8065032793416945, + "learning_rate": 0.001386, + "loss": 5.5174, + "step": 462 + }, + { + "epoch": 0.00463, + "grad_norm": 1.0894770209329594, + "learning_rate": 0.001389, + "loss": 5.5013, + "step": 463 + }, + { + "epoch": 0.00464, + "grad_norm": 1.3225439160647088, + "learning_rate": 0.001392, + "loss": 5.5348, + "step": 464 + }, + { + "epoch": 0.00465, + "grad_norm": 1.0604196603169807, + "learning_rate": 0.0013950000000000002, + "loss": 5.4973, + "step": 465 + }, + { + "epoch": 0.00466, + "grad_norm": 1.279638390325454, + "learning_rate": 0.0013980000000000002, + "loss": 5.5365, + "step": 466 + }, + { + "epoch": 0.00467, + "grad_norm": 0.8376473861337255, + "learning_rate": 0.0014010000000000001, + "loss": 5.5034, + "step": 467 + }, + { + "epoch": 0.00468, + "grad_norm": 0.8787509291075707, + "learning_rate": 0.001404, + "loss": 5.4981, + "step": 468 + }, + { + 
"epoch": 0.00469, + "grad_norm": 0.8315105482947757, + "learning_rate": 0.001407, + "loss": 5.4995, + "step": 469 + }, + { + "epoch": 0.0047, + "grad_norm": 0.900233910053011, + "learning_rate": 0.00141, + "loss": 5.5098, + "step": 470 + }, + { + "epoch": 0.00471, + "grad_norm": 1.1782268624389831, + "learning_rate": 0.001413, + "loss": 5.5031, + "step": 471 + }, + { + "epoch": 0.00472, + "grad_norm": 0.8433457613569132, + "learning_rate": 0.001416, + "loss": 5.4989, + "step": 472 + }, + { + "epoch": 0.00473, + "grad_norm": 0.8984284318795871, + "learning_rate": 0.001419, + "loss": 5.5107, + "step": 473 + }, + { + "epoch": 0.00474, + "grad_norm": 0.8057807296189134, + "learning_rate": 0.0014219999999999999, + "loss": 5.4892, + "step": 474 + }, + { + "epoch": 0.00475, + "grad_norm": 0.8485388443307728, + "learning_rate": 0.001425, + "loss": 5.4826, + "step": 475 + }, + { + "epoch": 0.00476, + "grad_norm": 0.9809665505076786, + "learning_rate": 0.001428, + "loss": 5.5192, + "step": 476 + }, + { + "epoch": 0.00477, + "grad_norm": 0.9686040040277449, + "learning_rate": 0.001431, + "loss": 5.4785, + "step": 477 + }, + { + "epoch": 0.00478, + "grad_norm": 0.8580634771679295, + "learning_rate": 0.001434, + "loss": 5.4949, + "step": 478 + }, + { + "epoch": 0.00479, + "grad_norm": 0.9699411511566143, + "learning_rate": 0.001437, + "loss": 5.4782, + "step": 479 + }, + { + "epoch": 0.0048, + "grad_norm": 0.8190893004419723, + "learning_rate": 0.0014399999999999999, + "loss": 5.4711, + "step": 480 + }, + { + "epoch": 0.00481, + "grad_norm": 0.7019568417012634, + "learning_rate": 0.001443, + "loss": 5.4711, + "step": 481 + }, + { + "epoch": 0.00482, + "grad_norm": 0.6677085458766991, + "learning_rate": 0.001446, + "loss": 5.4413, + "step": 482 + }, + { + "epoch": 0.00483, + "grad_norm": 0.6622223138809283, + "learning_rate": 0.001449, + "loss": 5.4499, + "step": 483 + }, + { + "epoch": 0.00484, + "grad_norm": 0.6831178312322733, + "learning_rate": 0.001452, + "loss": 5.4529, + "step": 484 + }, + { + "epoch": 0.00485, + "grad_norm": 0.6786720110326826, + "learning_rate": 0.001455, + "loss": 5.4548, + "step": 485 + }, + { + "epoch": 0.00486, + "grad_norm": 0.639008514866701, + "learning_rate": 0.001458, + "loss": 5.4237, + "step": 486 + }, + { + "epoch": 0.00487, + "grad_norm": 0.6663550617928226, + "learning_rate": 0.001461, + "loss": 5.4384, + "step": 487 + }, + { + "epoch": 0.00488, + "grad_norm": 0.5492133897414355, + "learning_rate": 0.001464, + "loss": 5.4132, + "step": 488 + }, + { + "epoch": 0.00489, + "grad_norm": 0.5801507624750007, + "learning_rate": 0.001467, + "loss": 5.4021, + "step": 489 + }, + { + "epoch": 0.0049, + "grad_norm": 0.7369258331072537, + "learning_rate": 0.00147, + "loss": 5.4206, + "step": 490 + }, + { + "epoch": 0.00491, + "grad_norm": 0.8149385883601376, + "learning_rate": 0.001473, + "loss": 5.4211, + "step": 491 + }, + { + "epoch": 0.00492, + "grad_norm": 0.7605903135404127, + "learning_rate": 0.001476, + "loss": 5.4167, + "step": 492 + }, + { + "epoch": 0.00493, + "grad_norm": 0.5930344173404182, + "learning_rate": 0.001479, + "loss": 5.4047, + "step": 493 + }, + { + "epoch": 0.00494, + "grad_norm": 0.7636413305061132, + "learning_rate": 0.001482, + "loss": 5.4167, + "step": 494 + }, + { + "epoch": 0.00495, + "grad_norm": 0.7369014833667976, + "learning_rate": 0.001485, + "loss": 5.4191, + "step": 495 + }, + { + "epoch": 0.00496, + "grad_norm": 0.8504550792082386, + "learning_rate": 0.001488, + "loss": 5.4031, + "step": 496 + }, + { + "epoch": 0.00497, + "grad_norm": 
0.8843425860263048, + "learning_rate": 0.001491, + "loss": 5.3921, + "step": 497 + }, + { + "epoch": 0.00498, + "grad_norm": 0.7438750793797253, + "learning_rate": 0.001494, + "loss": 5.4145, + "step": 498 + }, + { + "epoch": 0.00499, + "grad_norm": 0.7036650868069556, + "learning_rate": 0.001497, + "loss": 5.3822, + "step": 499 + }, + { + "epoch": 0.005, + "grad_norm": 0.7877256477029045, + "learning_rate": 0.0015, + "loss": 5.3896, + "step": 500 + }, + { + "epoch": 0.00501, + "grad_norm": 0.7990985096145019, + "learning_rate": 0.001503, + "loss": 5.3912, + "step": 501 + }, + { + "epoch": 0.00502, + "grad_norm": 0.5932348165440957, + "learning_rate": 0.001506, + "loss": 5.3876, + "step": 502 + }, + { + "epoch": 0.00503, + "grad_norm": 0.6328380348360387, + "learning_rate": 0.0015090000000000001, + "loss": 5.391, + "step": 503 + }, + { + "epoch": 0.00504, + "grad_norm": 0.5819727032922326, + "learning_rate": 0.001512, + "loss": 5.3693, + "step": 504 + }, + { + "epoch": 0.00505, + "grad_norm": 0.5953710061568833, + "learning_rate": 0.001515, + "loss": 5.3594, + "step": 505 + }, + { + "epoch": 0.00506, + "grad_norm": 0.5986845177656173, + "learning_rate": 0.001518, + "loss": 5.3624, + "step": 506 + }, + { + "epoch": 0.00507, + "grad_norm": 0.623690249743195, + "learning_rate": 0.001521, + "loss": 5.3571, + "step": 507 + }, + { + "epoch": 0.00508, + "grad_norm": 0.653996676321799, + "learning_rate": 0.001524, + "loss": 5.3588, + "step": 508 + }, + { + "epoch": 0.00509, + "grad_norm": 0.7417086851753733, + "learning_rate": 0.0015270000000000001, + "loss": 5.3422, + "step": 509 + }, + { + "epoch": 0.0051, + "grad_norm": 0.7033408638361137, + "learning_rate": 0.0015300000000000001, + "loss": 5.3598, + "step": 510 + }, + { + "epoch": 0.00511, + "grad_norm": 0.7013752626190988, + "learning_rate": 0.001533, + "loss": 5.3361, + "step": 511 + }, + { + "epoch": 0.00512, + "grad_norm": 0.7403626060663853, + "learning_rate": 0.001536, + "loss": 5.3344, + "step": 512 + }, + { + "epoch": 0.00513, + "grad_norm": 0.7668257914395668, + "learning_rate": 0.001539, + "loss": 5.3488, + "step": 513 + }, + { + "epoch": 0.00514, + "grad_norm": 0.8677889222141009, + "learning_rate": 0.001542, + "loss": 5.3327, + "step": 514 + }, + { + "epoch": 0.00515, + "grad_norm": 0.896065553430359, + "learning_rate": 0.0015450000000000001, + "loss": 5.3422, + "step": 515 + }, + { + "epoch": 0.00516, + "grad_norm": 1.0837566571017694, + "learning_rate": 0.0015480000000000001, + "loss": 5.3497, + "step": 516 + }, + { + "epoch": 0.00517, + "grad_norm": 0.8071431981996826, + "learning_rate": 0.001551, + "loss": 5.3323, + "step": 517 + }, + { + "epoch": 0.00518, + "grad_norm": 0.7918860105262308, + "learning_rate": 0.001554, + "loss": 5.3156, + "step": 518 + }, + { + "epoch": 0.00519, + "grad_norm": 0.7777992304037674, + "learning_rate": 0.001557, + "loss": 5.3213, + "step": 519 + }, + { + "epoch": 0.0052, + "grad_norm": 0.8275508154311308, + "learning_rate": 0.0015600000000000002, + "loss": 5.3297, + "step": 520 + }, + { + "epoch": 0.00521, + "grad_norm": 1.081326682572488, + "learning_rate": 0.0015630000000000002, + "loss": 5.3161, + "step": 521 + }, + { + "epoch": 0.00522, + "grad_norm": 1.0769033841173197, + "learning_rate": 0.0015660000000000001, + "loss": 5.3235, + "step": 522 + }, + { + "epoch": 0.00523, + "grad_norm": 1.1142920349652348, + "learning_rate": 0.001569, + "loss": 5.3418, + "step": 523 + }, + { + "epoch": 0.00524, + "grad_norm": 0.9680819380144772, + "learning_rate": 0.001572, + "loss": 5.3387, + "step": 524 + }, 
+ { + "epoch": 0.00525, + "grad_norm": 1.042843464512002, + "learning_rate": 0.001575, + "loss": 5.3364, + "step": 525 + }, + { + "epoch": 0.00526, + "grad_norm": 0.8760110028730904, + "learning_rate": 0.0015780000000000002, + "loss": 5.3079, + "step": 526 + }, + { + "epoch": 0.00527, + "grad_norm": 0.7131338611439731, + "learning_rate": 0.0015810000000000002, + "loss": 5.3127, + "step": 527 + }, + { + "epoch": 0.00528, + "grad_norm": 0.6786352126644868, + "learning_rate": 0.0015840000000000001, + "loss": 5.2868, + "step": 528 + }, + { + "epoch": 0.00529, + "grad_norm": 0.6952357562094686, + "learning_rate": 0.001587, + "loss": 5.2922, + "step": 529 + }, + { + "epoch": 0.0053, + "grad_norm": 0.8086799159810172, + "learning_rate": 0.00159, + "loss": 5.3039, + "step": 530 + }, + { + "epoch": 0.00531, + "grad_norm": 0.828973806141186, + "learning_rate": 0.001593, + "loss": 5.2873, + "step": 531 + }, + { + "epoch": 0.00532, + "grad_norm": 0.7467931538676229, + "learning_rate": 0.0015960000000000002, + "loss": 5.2943, + "step": 532 + }, + { + "epoch": 0.00533, + "grad_norm": 0.7141354989500697, + "learning_rate": 0.0015990000000000002, + "loss": 5.2786, + "step": 533 + }, + { + "epoch": 0.00534, + "grad_norm": 0.9764493557723114, + "learning_rate": 0.0016020000000000001, + "loss": 5.2728, + "step": 534 + }, + { + "epoch": 0.00535, + "grad_norm": 1.3880068471110967, + "learning_rate": 0.001605, + "loss": 5.2984, + "step": 535 + }, + { + "epoch": 0.00536, + "grad_norm": 0.9041341459356813, + "learning_rate": 0.001608, + "loss": 5.2855, + "step": 536 + }, + { + "epoch": 0.00537, + "grad_norm": 0.762740194970871, + "learning_rate": 0.0016110000000000002, + "loss": 5.2723, + "step": 537 + }, + { + "epoch": 0.00538, + "grad_norm": 0.787312661332683, + "learning_rate": 0.0016140000000000002, + "loss": 5.2506, + "step": 538 + }, + { + "epoch": 0.00539, + "grad_norm": 0.6102453826005042, + "learning_rate": 0.0016170000000000002, + "loss": 5.2365, + "step": 539 + }, + { + "epoch": 0.0054, + "grad_norm": 0.6664103859218952, + "learning_rate": 0.0016200000000000001, + "loss": 5.2513, + "step": 540 + }, + { + "epoch": 0.00541, + "grad_norm": 0.7228434818484509, + "learning_rate": 0.001623, + "loss": 5.2273, + "step": 541 + }, + { + "epoch": 0.00542, + "grad_norm": 0.9646545444558308, + "learning_rate": 0.001626, + "loss": 5.2641, + "step": 542 + }, + { + "epoch": 0.00543, + "grad_norm": 1.1121220265997553, + "learning_rate": 0.0016290000000000002, + "loss": 5.2329, + "step": 543 + }, + { + "epoch": 0.00544, + "grad_norm": 0.7994777164441184, + "learning_rate": 0.0016320000000000002, + "loss": 5.2404, + "step": 544 + }, + { + "epoch": 0.00545, + "grad_norm": 0.7226008260314222, + "learning_rate": 0.0016350000000000002, + "loss": 5.2461, + "step": 545 + }, + { + "epoch": 0.00546, + "grad_norm": 0.7699535423166085, + "learning_rate": 0.0016380000000000001, + "loss": 5.2193, + "step": 546 + }, + { + "epoch": 0.00547, + "grad_norm": 0.6548240326600666, + "learning_rate": 0.001641, + "loss": 5.2108, + "step": 547 + }, + { + "epoch": 0.00548, + "grad_norm": 0.6332922946851393, + "learning_rate": 0.001644, + "loss": 5.2061, + "step": 548 + }, + { + "epoch": 0.00549, + "grad_norm": 0.6231528959927674, + "learning_rate": 0.0016470000000000002, + "loss": 5.2, + "step": 549 + }, + { + "epoch": 0.0055, + "grad_norm": 0.7419840881368932, + "learning_rate": 0.0016500000000000002, + "loss": 5.2179, + "step": 550 + }, + { + "epoch": 0.00551, + "grad_norm": 0.7180205816820676, + "learning_rate": 0.0016530000000000002, + 
"loss": 5.2057, + "step": 551 + }, + { + "epoch": 0.00552, + "grad_norm": 0.5920069574731561, + "learning_rate": 0.0016560000000000001, + "loss": 5.1823, + "step": 552 + }, + { + "epoch": 0.00553, + "grad_norm": 0.7996998429214144, + "learning_rate": 0.001659, + "loss": 5.1806, + "step": 553 + }, + { + "epoch": 0.00554, + "grad_norm": 1.0229110500291838, + "learning_rate": 0.0016620000000000003, + "loss": 5.1965, + "step": 554 + }, + { + "epoch": 0.00555, + "grad_norm": 1.1118473608885646, + "learning_rate": 0.0016650000000000002, + "loss": 5.1994, + "step": 555 + }, + { + "epoch": 0.00556, + "grad_norm": 0.9366759039894813, + "learning_rate": 0.0016680000000000002, + "loss": 5.1806, + "step": 556 + }, + { + "epoch": 0.00557, + "grad_norm": 0.9046668934887724, + "learning_rate": 0.0016710000000000002, + "loss": 5.1671, + "step": 557 + }, + { + "epoch": 0.00558, + "grad_norm": 1.142251826676036, + "learning_rate": 0.0016740000000000001, + "loss": 5.2009, + "step": 558 + }, + { + "epoch": 0.00559, + "grad_norm": 1.0520781475504497, + "learning_rate": 0.001677, + "loss": 5.1865, + "step": 559 + }, + { + "epoch": 0.0056, + "grad_norm": 1.0780070897638405, + "learning_rate": 0.0016800000000000003, + "loss": 5.1609, + "step": 560 + }, + { + "epoch": 0.00561, + "grad_norm": 0.8904071170090557, + "learning_rate": 0.0016830000000000003, + "loss": 5.1755, + "step": 561 + }, + { + "epoch": 0.00562, + "grad_norm": 0.8189640026396579, + "learning_rate": 0.0016860000000000002, + "loss": 5.168, + "step": 562 + }, + { + "epoch": 0.00563, + "grad_norm": 0.746495696524217, + "learning_rate": 0.001689, + "loss": 5.1552, + "step": 563 + }, + { + "epoch": 0.00564, + "grad_norm": 0.7249953066463264, + "learning_rate": 0.001692, + "loss": 5.1416, + "step": 564 + }, + { + "epoch": 0.00565, + "grad_norm": 0.6193711615047397, + "learning_rate": 0.001695, + "loss": 5.1336, + "step": 565 + }, + { + "epoch": 0.00566, + "grad_norm": 0.8661212922050541, + "learning_rate": 0.0016979999999999999, + "loss": 5.1381, + "step": 566 + }, + { + "epoch": 0.00567, + "grad_norm": 0.9452019797636565, + "learning_rate": 0.0017009999999999998, + "loss": 5.1333, + "step": 567 + }, + { + "epoch": 0.00568, + "grad_norm": 0.8863756714851743, + "learning_rate": 0.0017039999999999998, + "loss": 5.1455, + "step": 568 + }, + { + "epoch": 0.00569, + "grad_norm": 0.8164512297006329, + "learning_rate": 0.001707, + "loss": 5.1087, + "step": 569 + }, + { + "epoch": 0.0057, + "grad_norm": 0.8055756655780417, + "learning_rate": 0.00171, + "loss": 5.1416, + "step": 570 + }, + { + "epoch": 0.00571, + "grad_norm": 0.9556127682537684, + "learning_rate": 0.001713, + "loss": 5.1421, + "step": 571 + }, + { + "epoch": 0.00572, + "grad_norm": 1.1121438340859977, + "learning_rate": 0.0017159999999999999, + "loss": 5.1242, + "step": 572 + }, + { + "epoch": 0.00573, + "grad_norm": 0.8538691427356556, + "learning_rate": 0.0017189999999999998, + "loss": 5.1261, + "step": 573 + }, + { + "epoch": 0.00574, + "grad_norm": 0.754134808897758, + "learning_rate": 0.001722, + "loss": 5.1186, + "step": 574 + }, + { + "epoch": 0.00575, + "grad_norm": 0.6045959777005846, + "learning_rate": 0.001725, + "loss": 5.0826, + "step": 575 + }, + { + "epoch": 0.00576, + "grad_norm": 0.5849168439848929, + "learning_rate": 0.001728, + "loss": 5.0868, + "step": 576 + }, + { + "epoch": 0.00577, + "grad_norm": 0.5881868482585118, + "learning_rate": 0.001731, + "loss": 5.0984, + "step": 577 + }, + { + "epoch": 0.00578, + "grad_norm": 0.6496481817365951, + "learning_rate": 
0.0017339999999999999, + "loss": 5.0795, + "step": 578 + }, + { + "epoch": 0.00579, + "grad_norm": 0.7126042661301508, + "learning_rate": 0.0017369999999999998, + "loss": 5.0702, + "step": 579 + }, + { + "epoch": 0.0058, + "grad_norm": 0.691634070596695, + "learning_rate": 0.00174, + "loss": 5.0826, + "step": 580 + }, + { + "epoch": 0.00581, + "grad_norm": 0.6405819953602082, + "learning_rate": 0.001743, + "loss": 5.0809, + "step": 581 + }, + { + "epoch": 0.00582, + "grad_norm": 0.6144348123489994, + "learning_rate": 0.001746, + "loss": 5.0509, + "step": 582 + }, + { + "epoch": 0.00583, + "grad_norm": 0.5400038424579979, + "learning_rate": 0.001749, + "loss": 5.0752, + "step": 583 + }, + { + "epoch": 0.00584, + "grad_norm": 0.4936797939946059, + "learning_rate": 0.0017519999999999999, + "loss": 5.0634, + "step": 584 + }, + { + "epoch": 0.00585, + "grad_norm": 0.5420757595953297, + "learning_rate": 0.0017549999999999998, + "loss": 5.0509, + "step": 585 + }, + { + "epoch": 0.00586, + "grad_norm": 0.6461298473240921, + "learning_rate": 0.001758, + "loss": 5.0463, + "step": 586 + }, + { + "epoch": 0.00587, + "grad_norm": 1.0127747465457377, + "learning_rate": 0.001761, + "loss": 5.0477, + "step": 587 + }, + { + "epoch": 0.00588, + "grad_norm": 1.312605638466154, + "learning_rate": 0.001764, + "loss": 5.0646, + "step": 588 + }, + { + "epoch": 0.00589, + "grad_norm": 0.7336414507180539, + "learning_rate": 0.001767, + "loss": 5.0246, + "step": 589 + }, + { + "epoch": 0.0059, + "grad_norm": 0.9403709566188089, + "learning_rate": 0.0017699999999999999, + "loss": 5.0344, + "step": 590 + }, + { + "epoch": 0.00591, + "grad_norm": 1.1082528668309917, + "learning_rate": 0.001773, + "loss": 5.0455, + "step": 591 + }, + { + "epoch": 0.00592, + "grad_norm": 1.2840924567627583, + "learning_rate": 0.001776, + "loss": 5.0904, + "step": 592 + }, + { + "epoch": 0.00593, + "grad_norm": 0.7010705426983365, + "learning_rate": 0.001779, + "loss": 5.0507, + "step": 593 + }, + { + "epoch": 0.00594, + "grad_norm": 0.7515579064184676, + "learning_rate": 0.001782, + "loss": 5.0452, + "step": 594 + }, + { + "epoch": 0.00595, + "grad_norm": 0.8237589608472985, + "learning_rate": 0.001785, + "loss": 5.0574, + "step": 595 + }, + { + "epoch": 0.00596, + "grad_norm": 0.7511193245039597, + "learning_rate": 0.0017879999999999999, + "loss": 5.0458, + "step": 596 + }, + { + "epoch": 0.00597, + "grad_norm": 0.6951714106885373, + "learning_rate": 0.001791, + "loss": 5.0059, + "step": 597 + }, + { + "epoch": 0.00598, + "grad_norm": 0.6637745885790589, + "learning_rate": 0.001794, + "loss": 5.0231, + "step": 598 + }, + { + "epoch": 0.00599, + "grad_norm": 0.7127858481763457, + "learning_rate": 0.001797, + "loss": 5.0133, + "step": 599 + }, + { + "epoch": 0.006, + "grad_norm": 0.6761974733345899, + "learning_rate": 0.0018, + "loss": 5.0135, + "step": 600 + }, + { + "epoch": 0.00601, + "grad_norm": 0.6625605364634614, + "learning_rate": 0.001803, + "loss": 5.0015, + "step": 601 + }, + { + "epoch": 0.00602, + "grad_norm": 0.6742478727145375, + "learning_rate": 0.0018059999999999999, + "loss": 4.9862, + "step": 602 + }, + { + "epoch": 0.00603, + "grad_norm": 1.00015732542698, + "learning_rate": 0.001809, + "loss": 5.0193, + "step": 603 + }, + { + "epoch": 0.00604, + "grad_norm": 1.387382192884798, + "learning_rate": 0.001812, + "loss": 5.0251, + "step": 604 + }, + { + "epoch": 0.00605, + "grad_norm": 0.6727404947716551, + "learning_rate": 0.001815, + "loss": 5.0023, + "step": 605 + }, + { + "epoch": 0.00606, + "grad_norm": 
0.9044609854709968, + "learning_rate": 0.001818, + "loss": 5.0189, + "step": 606 + }, + { + "epoch": 0.00607, + "grad_norm": 1.08596708759871, + "learning_rate": 0.001821, + "loss": 5.0221, + "step": 607 + }, + { + "epoch": 0.00608, + "grad_norm": 1.0369936566425986, + "learning_rate": 0.001824, + "loss": 5.008, + "step": 608 + }, + { + "epoch": 0.00609, + "grad_norm": 1.0935517991120203, + "learning_rate": 0.001827, + "loss": 5.0109, + "step": 609 + }, + { + "epoch": 0.0061, + "grad_norm": 0.9727711844599547, + "learning_rate": 0.00183, + "loss": 4.9666, + "step": 610 + }, + { + "epoch": 0.00611, + "grad_norm": 0.9492725313696737, + "learning_rate": 0.001833, + "loss": 4.9894, + "step": 611 + }, + { + "epoch": 0.00612, + "grad_norm": 0.845936691035656, + "learning_rate": 0.001836, + "loss": 4.9768, + "step": 612 + }, + { + "epoch": 0.00613, + "grad_norm": 0.917579763764549, + "learning_rate": 0.001839, + "loss": 4.9836, + "step": 613 + }, + { + "epoch": 0.00614, + "grad_norm": 0.8975809320202123, + "learning_rate": 0.001842, + "loss": 5.0024, + "step": 614 + }, + { + "epoch": 0.00615, + "grad_norm": 1.1935315831043936, + "learning_rate": 0.001845, + "loss": 5.0018, + "step": 615 + }, + { + "epoch": 0.00616, + "grad_norm": 0.9948318214992812, + "learning_rate": 0.001848, + "loss": 4.9924, + "step": 616 + }, + { + "epoch": 0.00617, + "grad_norm": 0.8063669388844663, + "learning_rate": 0.001851, + "loss": 4.9919, + "step": 617 + }, + { + "epoch": 0.00618, + "grad_norm": 0.8184910219660716, + "learning_rate": 0.001854, + "loss": 4.9666, + "step": 618 + }, + { + "epoch": 0.00619, + "grad_norm": 0.7780464806882716, + "learning_rate": 0.001857, + "loss": 4.9753, + "step": 619 + }, + { + "epoch": 0.0062, + "grad_norm": 0.7430630101852395, + "learning_rate": 0.00186, + "loss": 4.9566, + "step": 620 + }, + { + "epoch": 0.00621, + "grad_norm": 0.8040699289060931, + "learning_rate": 0.001863, + "loss": 4.9542, + "step": 621 + }, + { + "epoch": 0.00622, + "grad_norm": 0.8423285566803137, + "learning_rate": 0.001866, + "loss": 4.9653, + "step": 622 + }, + { + "epoch": 0.00623, + "grad_norm": 0.6802855865245334, + "learning_rate": 0.001869, + "loss": 4.9365, + "step": 623 + }, + { + "epoch": 0.00624, + "grad_norm": 0.7045868643772514, + "learning_rate": 0.001872, + "loss": 4.9425, + "step": 624 + }, + { + "epoch": 0.00625, + "grad_norm": 0.69605003901388, + "learning_rate": 0.001875, + "loss": 4.9397, + "step": 625 + }, + { + "epoch": 0.00626, + "grad_norm": 0.8788947819856907, + "learning_rate": 0.0018780000000000001, + "loss": 4.9403, + "step": 626 + }, + { + "epoch": 0.00627, + "grad_norm": 0.8580113274469313, + "learning_rate": 0.001881, + "loss": 4.9238, + "step": 627 + }, + { + "epoch": 0.00628, + "grad_norm": 0.7437087045232712, + "learning_rate": 0.001884, + "loss": 4.9553, + "step": 628 + }, + { + "epoch": 0.00629, + "grad_norm": 0.673794469112573, + "learning_rate": 0.001887, + "loss": 4.9059, + "step": 629 + }, + { + "epoch": 0.0063, + "grad_norm": 0.7529443514224647, + "learning_rate": 0.00189, + "loss": 4.9225, + "step": 630 + }, + { + "epoch": 0.00631, + "grad_norm": 0.7882316002133182, + "learning_rate": 0.0018930000000000002, + "loss": 4.9159, + "step": 631 + }, + { + "epoch": 0.00632, + "grad_norm": 0.7345089369079263, + "learning_rate": 0.0018960000000000001, + "loss": 4.9318, + "step": 632 + }, + { + "epoch": 0.00633, + "grad_norm": 0.807557335679046, + "learning_rate": 0.001899, + "loss": 4.9156, + "step": 633 + }, + { + "epoch": 0.00634, + "grad_norm": 0.856273971211143, + 
"learning_rate": 0.001902, + "loss": 4.9086, + "step": 634 + }, + { + "epoch": 0.00635, + "grad_norm": 0.8041095750030954, + "learning_rate": 0.001905, + "loss": 4.8993, + "step": 635 + }, + { + "epoch": 0.00636, + "grad_norm": 0.8334087326563642, + "learning_rate": 0.001908, + "loss": 4.9117, + "step": 636 + }, + { + "epoch": 0.00637, + "grad_norm": 0.8711627404236827, + "learning_rate": 0.0019110000000000002, + "loss": 4.9242, + "step": 637 + }, + { + "epoch": 0.00638, + "grad_norm": 0.950273886749592, + "learning_rate": 0.0019140000000000001, + "loss": 4.918, + "step": 638 + }, + { + "epoch": 0.00639, + "grad_norm": 0.9763758019156279, + "learning_rate": 0.001917, + "loss": 4.8946, + "step": 639 + }, + { + "epoch": 0.0064, + "grad_norm": 0.9069546349974866, + "learning_rate": 0.00192, + "loss": 4.9049, + "step": 640 + }, + { + "epoch": 0.00641, + "grad_norm": 0.7602914411110755, + "learning_rate": 0.001923, + "loss": 4.8967, + "step": 641 + }, + { + "epoch": 0.00642, + "grad_norm": 0.6358369958975738, + "learning_rate": 0.001926, + "loss": 4.8823, + "step": 642 + }, + { + "epoch": 0.00643, + "grad_norm": 0.6298192488222032, + "learning_rate": 0.0019290000000000002, + "loss": 4.8984, + "step": 643 + }, + { + "epoch": 0.00644, + "grad_norm": 0.5835488483304159, + "learning_rate": 0.0019320000000000001, + "loss": 4.8719, + "step": 644 + }, + { + "epoch": 0.00645, + "grad_norm": 0.509893487039198, + "learning_rate": 0.001935, + "loss": 4.8697, + "step": 645 + }, + { + "epoch": 0.00646, + "grad_norm": 0.5016743009567477, + "learning_rate": 0.001938, + "loss": 4.8822, + "step": 646 + }, + { + "epoch": 0.00647, + "grad_norm": 0.4834671196673339, + "learning_rate": 0.001941, + "loss": 4.8737, + "step": 647 + }, + { + "epoch": 0.00648, + "grad_norm": 0.4900666309904975, + "learning_rate": 0.0019440000000000002, + "loss": 4.8665, + "step": 648 + }, + { + "epoch": 0.00649, + "grad_norm": 0.6242032394190251, + "learning_rate": 0.0019470000000000002, + "loss": 4.8569, + "step": 649 + }, + { + "epoch": 0.0065, + "grad_norm": 0.7946730011730083, + "learning_rate": 0.0019500000000000001, + "loss": 4.8944, + "step": 650 + }, + { + "epoch": 0.00651, + "grad_norm": 0.7813096551019217, + "learning_rate": 0.001953, + "loss": 4.8797, + "step": 651 + }, + { + "epoch": 0.00652, + "grad_norm": 0.5708054833927125, + "learning_rate": 0.0019560000000000003, + "loss": 4.8538, + "step": 652 + }, + { + "epoch": 0.00653, + "grad_norm": 0.6416767177196502, + "learning_rate": 0.0019590000000000002, + "loss": 4.8612, + "step": 653 + }, + { + "epoch": 0.00654, + "grad_norm": 0.6414247107018044, + "learning_rate": 0.001962, + "loss": 4.8324, + "step": 654 + }, + { + "epoch": 0.00655, + "grad_norm": 0.5608777579230684, + "learning_rate": 0.001965, + "loss": 4.8581, + "step": 655 + }, + { + "epoch": 0.00656, + "grad_norm": 0.4812696659686437, + "learning_rate": 0.001968, + "loss": 4.8497, + "step": 656 + }, + { + "epoch": 0.00657, + "grad_norm": 0.5196607021803705, + "learning_rate": 0.001971, + "loss": 4.8212, + "step": 657 + }, + { + "epoch": 0.00658, + "grad_norm": 0.5384007134004025, + "learning_rate": 0.001974, + "loss": 4.8422, + "step": 658 + }, + { + "epoch": 0.00659, + "grad_norm": 0.6084877834513672, + "learning_rate": 0.001977, + "loss": 4.8215, + "step": 659 + }, + { + "epoch": 0.0066, + "grad_norm": 0.7589081819730935, + "learning_rate": 0.00198, + "loss": 4.8434, + "step": 660 + }, + { + "epoch": 0.00661, + "grad_norm": 0.7941713837035096, + "learning_rate": 0.001983, + "loss": 4.8155, + "step": 661 + }, + { + 
"epoch": 0.00662, + "grad_norm": 0.7978547164974868, + "learning_rate": 0.0019860000000000004, + "loss": 4.8127, + "step": 662 + }, + { + "epoch": 0.00663, + "grad_norm": 0.893378015840618, + "learning_rate": 0.0019890000000000003, + "loss": 4.8265, + "step": 663 + }, + { + "epoch": 0.00664, + "grad_norm": 0.899949440268495, + "learning_rate": 0.0019920000000000003, + "loss": 4.8283, + "step": 664 + }, + { + "epoch": 0.00665, + "grad_norm": 0.8127988991300923, + "learning_rate": 0.0019950000000000002, + "loss": 4.7986, + "step": 665 + }, + { + "epoch": 0.00666, + "grad_norm": 0.9200900241429067, + "learning_rate": 0.001998, + "loss": 4.8226, + "step": 666 + }, + { + "epoch": 0.00667, + "grad_norm": 1.037264674390151, + "learning_rate": 0.002001, + "loss": 4.8324, + "step": 667 + }, + { + "epoch": 0.00668, + "grad_norm": 0.8082146942337904, + "learning_rate": 0.002004, + "loss": 4.8427, + "step": 668 + }, + { + "epoch": 0.00669, + "grad_norm": 0.7033624756486074, + "learning_rate": 0.002007, + "loss": 4.8562, + "step": 669 + }, + { + "epoch": 0.0067, + "grad_norm": 0.751969455636164, + "learning_rate": 0.00201, + "loss": 4.8525, + "step": 670 + }, + { + "epoch": 0.00671, + "grad_norm": 0.736520529365372, + "learning_rate": 0.002013, + "loss": 4.8206, + "step": 671 + }, + { + "epoch": 0.00672, + "grad_norm": 0.7466982774868701, + "learning_rate": 0.002016, + "loss": 4.8129, + "step": 672 + }, + { + "epoch": 0.00673, + "grad_norm": 0.7025220166479262, + "learning_rate": 0.002019, + "loss": 4.8146, + "step": 673 + }, + { + "epoch": 0.00674, + "grad_norm": 0.8461453039283889, + "learning_rate": 0.0020220000000000004, + "loss": 4.8144, + "step": 674 + }, + { + "epoch": 0.00675, + "grad_norm": 0.9399881649158435, + "learning_rate": 0.0020250000000000003, + "loss": 4.8482, + "step": 675 + }, + { + "epoch": 0.00676, + "grad_norm": 0.9357632097468723, + "learning_rate": 0.0020280000000000003, + "loss": 4.8268, + "step": 676 + }, + { + "epoch": 0.00677, + "grad_norm": 0.7758960619033557, + "learning_rate": 0.0020310000000000003, + "loss": 4.8204, + "step": 677 + }, + { + "epoch": 0.00678, + "grad_norm": 0.699292513140664, + "learning_rate": 0.0020340000000000002, + "loss": 4.8248, + "step": 678 + }, + { + "epoch": 0.00679, + "grad_norm": 0.7370787957429817, + "learning_rate": 0.002037, + "loss": 4.816, + "step": 679 + }, + { + "epoch": 0.0068, + "grad_norm": 0.8377547362902558, + "learning_rate": 0.00204, + "loss": 4.8174, + "step": 680 + }, + { + "epoch": 0.00681, + "grad_norm": 0.8259782799224379, + "learning_rate": 0.002043, + "loss": 4.8155, + "step": 681 + }, + { + "epoch": 0.00682, + "grad_norm": 0.7684261091318535, + "learning_rate": 0.002046, + "loss": 4.8082, + "step": 682 + }, + { + "epoch": 0.00683, + "grad_norm": 0.8487367019402318, + "learning_rate": 0.002049, + "loss": 4.7989, + "step": 683 + }, + { + "epoch": 0.00684, + "grad_norm": 0.8838018894616847, + "learning_rate": 0.002052, + "loss": 4.8194, + "step": 684 + }, + { + "epoch": 0.00685, + "grad_norm": 0.8860697203395584, + "learning_rate": 0.0020550000000000004, + "loss": 4.8252, + "step": 685 + }, + { + "epoch": 0.00686, + "grad_norm": 0.7336183086529302, + "learning_rate": 0.0020580000000000004, + "loss": 4.7837, + "step": 686 + }, + { + "epoch": 0.00687, + "grad_norm": 0.8176630379413288, + "learning_rate": 0.0020610000000000003, + "loss": 4.7944, + "step": 687 + }, + { + "epoch": 0.00688, + "grad_norm": 0.7703386551342313, + "learning_rate": 0.002064, + "loss": 4.7926, + "step": 688 + }, + { + "epoch": 0.00689, + "grad_norm": 
0.6919162061146223, + "learning_rate": 0.002067, + "loss": 4.7965, + "step": 689 + }, + { + "epoch": 0.0069, + "grad_norm": 0.7424392154268248, + "learning_rate": 0.00207, + "loss": 4.7893, + "step": 690 + }, + { + "epoch": 0.00691, + "grad_norm": 0.6515618524352145, + "learning_rate": 0.0020729999999999998, + "loss": 4.7559, + "step": 691 + }, + { + "epoch": 0.00692, + "grad_norm": 0.6440846578393002, + "learning_rate": 0.0020759999999999997, + "loss": 4.776, + "step": 692 + }, + { + "epoch": 0.00693, + "grad_norm": 0.6847536481828279, + "learning_rate": 0.0020789999999999997, + "loss": 4.7889, + "step": 693 + }, + { + "epoch": 0.00694, + "grad_norm": 0.6321576161870056, + "learning_rate": 0.002082, + "loss": 4.7627, + "step": 694 + }, + { + "epoch": 0.00695, + "grad_norm": 0.5791129920715202, + "learning_rate": 0.002085, + "loss": 4.7609, + "step": 695 + }, + { + "epoch": 0.00696, + "grad_norm": 0.5895865438272808, + "learning_rate": 0.002088, + "loss": 4.7723, + "step": 696 + }, + { + "epoch": 0.00697, + "grad_norm": 0.5008187604770619, + "learning_rate": 0.002091, + "loss": 4.7695, + "step": 697 + }, + { + "epoch": 0.00698, + "grad_norm": 0.6970439697756265, + "learning_rate": 0.002094, + "loss": 4.7619, + "step": 698 + }, + { + "epoch": 0.00699, + "grad_norm": 0.8941704543265332, + "learning_rate": 0.002097, + "loss": 4.7572, + "step": 699 + }, + { + "epoch": 0.007, + "grad_norm": 0.9068627730041655, + "learning_rate": 0.0021, + "loss": 4.787, + "step": 700 + }, + { + "epoch": 0.00701, + "grad_norm": 0.7146483381512303, + "learning_rate": 0.002103, + "loss": 4.7547, + "step": 701 + }, + { + "epoch": 0.00702, + "grad_norm": 0.9172255209446268, + "learning_rate": 0.002106, + "loss": 4.77, + "step": 702 + }, + { + "epoch": 0.00703, + "grad_norm": 0.9047172643914575, + "learning_rate": 0.0021089999999999998, + "loss": 4.7553, + "step": 703 + }, + { + "epoch": 0.00704, + "grad_norm": 0.7853692419556185, + "learning_rate": 0.0021119999999999997, + "loss": 4.7583, + "step": 704 + }, + { + "epoch": 0.00705, + "grad_norm": 0.7199878385614988, + "learning_rate": 0.002115, + "loss": 4.7725, + "step": 705 + }, + { + "epoch": 0.00706, + "grad_norm": 0.7213393080579115, + "learning_rate": 0.002118, + "loss": 4.7581, + "step": 706 + }, + { + "epoch": 0.00707, + "grad_norm": 0.7597119331851468, + "learning_rate": 0.002121, + "loss": 4.7413, + "step": 707 + }, + { + "epoch": 0.00708, + "grad_norm": 0.6864102182118973, + "learning_rate": 0.002124, + "loss": 4.7187, + "step": 708 + }, + { + "epoch": 0.00709, + "grad_norm": 0.7815902187763394, + "learning_rate": 0.002127, + "loss": 4.7572, + "step": 709 + }, + { + "epoch": 0.0071, + "grad_norm": 0.8451784595752648, + "learning_rate": 0.00213, + "loss": 4.7552, + "step": 710 + }, + { + "epoch": 0.00711, + "grad_norm": 1.0054655399528605, + "learning_rate": 0.002133, + "loss": 4.7414, + "step": 711 + }, + { + "epoch": 0.00712, + "grad_norm": 0.9031323884556907, + "learning_rate": 0.002136, + "loss": 4.7728, + "step": 712 + }, + { + "epoch": 0.00713, + "grad_norm": 0.961250906275713, + "learning_rate": 0.002139, + "loss": 4.7862, + "step": 713 + }, + { + "epoch": 0.00714, + "grad_norm": 0.9556615314074448, + "learning_rate": 0.002142, + "loss": 4.7819, + "step": 714 + }, + { + "epoch": 0.00715, + "grad_norm": 0.837203607680531, + "learning_rate": 0.0021449999999999998, + "loss": 4.7417, + "step": 715 + }, + { + "epoch": 0.00716, + "grad_norm": 0.7607986282551458, + "learning_rate": 0.002148, + "loss": 4.7359, + "step": 716 + }, + { + "epoch": 0.00717, + 
"grad_norm": 0.8703365352693242, + "learning_rate": 0.002151, + "loss": 4.7519, + "step": 717 + }, + { + "epoch": 0.00718, + "grad_norm": 0.8830641357048177, + "learning_rate": 0.002154, + "loss": 4.7536, + "step": 718 + }, + { + "epoch": 0.00719, + "grad_norm": 0.8090298717986324, + "learning_rate": 0.002157, + "loss": 4.7586, + "step": 719 + }, + { + "epoch": 0.0072, + "grad_norm": 0.7002439324520396, + "learning_rate": 0.00216, + "loss": 4.7466, + "step": 720 + }, + { + "epoch": 0.00721, + "grad_norm": 0.7540412799334538, + "learning_rate": 0.002163, + "loss": 4.7512, + "step": 721 + }, + { + "epoch": 0.00722, + "grad_norm": 0.7234067697970273, + "learning_rate": 0.002166, + "loss": 4.7241, + "step": 722 + }, + { + "epoch": 0.00723, + "grad_norm": 0.5796869415275953, + "learning_rate": 0.002169, + "loss": 4.73, + "step": 723 + }, + { + "epoch": 0.00724, + "grad_norm": 0.6360613090935692, + "learning_rate": 0.002172, + "loss": 4.7294, + "step": 724 + }, + { + "epoch": 0.00725, + "grad_norm": 0.6592111108932344, + "learning_rate": 0.002175, + "loss": 4.7232, + "step": 725 + }, + { + "epoch": 0.00726, + "grad_norm": 0.7000176967246123, + "learning_rate": 0.002178, + "loss": 4.7406, + "step": 726 + }, + { + "epoch": 0.00727, + "grad_norm": 0.6658154130327723, + "learning_rate": 0.0021809999999999998, + "loss": 4.7131, + "step": 727 + }, + { + "epoch": 0.00728, + "grad_norm": 0.500886702178687, + "learning_rate": 0.002184, + "loss": 4.7222, + "step": 728 + }, + { + "epoch": 0.00729, + "grad_norm": 0.553445989931654, + "learning_rate": 0.002187, + "loss": 4.7196, + "step": 729 + }, + { + "epoch": 0.0073, + "grad_norm": 0.5928953773304845, + "learning_rate": 0.00219, + "loss": 4.7153, + "step": 730 + }, + { + "epoch": 0.00731, + "grad_norm": 0.5280339609019513, + "learning_rate": 0.002193, + "loss": 4.7069, + "step": 731 + }, + { + "epoch": 0.00732, + "grad_norm": 0.4601497488067425, + "learning_rate": 0.002196, + "loss": 4.7146, + "step": 732 + }, + { + "epoch": 0.00733, + "grad_norm": 0.4831437067076967, + "learning_rate": 0.002199, + "loss": 4.6865, + "step": 733 + }, + { + "epoch": 0.00734, + "grad_norm": 0.48957731222976764, + "learning_rate": 0.002202, + "loss": 4.7176, + "step": 734 + }, + { + "epoch": 0.00735, + "grad_norm": 0.5029506248084066, + "learning_rate": 0.002205, + "loss": 4.7231, + "step": 735 + }, + { + "epoch": 0.00736, + "grad_norm": 0.5300436466729722, + "learning_rate": 0.002208, + "loss": 4.7045, + "step": 736 + }, + { + "epoch": 0.00737, + "grad_norm": 0.5354857814520255, + "learning_rate": 0.002211, + "loss": 4.6701, + "step": 737 + }, + { + "epoch": 0.00738, + "grad_norm": 0.6855959285026678, + "learning_rate": 0.002214, + "loss": 4.6857, + "step": 738 + }, + { + "epoch": 0.00739, + "grad_norm": 0.7193696222416395, + "learning_rate": 0.0022170000000000002, + "loss": 4.6773, + "step": 739 + }, + { + "epoch": 0.0074, + "grad_norm": 0.7000843676029133, + "learning_rate": 0.00222, + "loss": 4.686, + "step": 740 + }, + { + "epoch": 0.00741, + "grad_norm": 0.8262482718120322, + "learning_rate": 0.002223, + "loss": 4.6648, + "step": 741 + }, + { + "epoch": 0.00742, + "grad_norm": 0.8068053565529363, + "learning_rate": 0.002226, + "loss": 4.71, + "step": 742 + }, + { + "epoch": 0.00743, + "grad_norm": 0.7713935209386231, + "learning_rate": 0.002229, + "loss": 4.6667, + "step": 743 + }, + { + "epoch": 0.00744, + "grad_norm": 0.6806090978340125, + "learning_rate": 0.002232, + "loss": 4.668, + "step": 744 + }, + { + "epoch": 0.00745, + "grad_norm": 0.8150134425373752, + 
"learning_rate": 0.002235, + "loss": 4.6906, + "step": 745 + }, + { + "epoch": 0.00746, + "grad_norm": 0.9083405480081935, + "learning_rate": 0.002238, + "loss": 4.6909, + "step": 746 + }, + { + "epoch": 0.00747, + "grad_norm": 1.1312224699232258, + "learning_rate": 0.002241, + "loss": 4.6956, + "step": 747 + }, + { + "epoch": 0.00748, + "grad_norm": 0.8174601291810354, + "learning_rate": 0.002244, + "loss": 4.6973, + "step": 748 + }, + { + "epoch": 0.00749, + "grad_norm": 0.8423282795209855, + "learning_rate": 0.002247, + "loss": 4.6802, + "step": 749 + }, + { + "epoch": 0.0075, + "grad_norm": 0.8679250685858194, + "learning_rate": 0.0022500000000000003, + "loss": 4.7268, + "step": 750 + }, + { + "epoch": 0.00751, + "grad_norm": 1.0939871221004271, + "learning_rate": 0.0022530000000000002, + "loss": 4.7337, + "step": 751 + }, + { + "epoch": 0.00752, + "grad_norm": 0.9886618564303525, + "learning_rate": 0.002256, + "loss": 4.6822, + "step": 752 + }, + { + "epoch": 0.00753, + "grad_norm": 0.9258452428585717, + "learning_rate": 0.002259, + "loss": 4.7192, + "step": 753 + }, + { + "epoch": 0.00754, + "grad_norm": 0.8790583060068752, + "learning_rate": 0.002262, + "loss": 4.7345, + "step": 754 + }, + { + "epoch": 0.00755, + "grad_norm": 0.7727162685258321, + "learning_rate": 0.002265, + "loss": 4.6919, + "step": 755 + }, + { + "epoch": 0.00756, + "grad_norm": 0.8048369196552551, + "learning_rate": 0.002268, + "loss": 4.6801, + "step": 756 + }, + { + "epoch": 0.00757, + "grad_norm": 0.7307749692176225, + "learning_rate": 0.002271, + "loss": 4.6902, + "step": 757 + }, + { + "epoch": 0.00758, + "grad_norm": 0.7628918458541498, + "learning_rate": 0.002274, + "loss": 4.6882, + "step": 758 + }, + { + "epoch": 0.00759, + "grad_norm": 0.6811469027490352, + "learning_rate": 0.002277, + "loss": 4.693, + "step": 759 + }, + { + "epoch": 0.0076, + "grad_norm": 0.5530412513377371, + "learning_rate": 0.00228, + "loss": 4.6735, + "step": 760 + }, + { + "epoch": 0.00761, + "grad_norm": 0.5221449888101848, + "learning_rate": 0.002283, + "loss": 4.6405, + "step": 761 + }, + { + "epoch": 0.00762, + "grad_norm": 0.5687089394846262, + "learning_rate": 0.0022860000000000003, + "loss": 4.6208, + "step": 762 + }, + { + "epoch": 0.00763, + "grad_norm": 0.5814285508645848, + "learning_rate": 0.0022890000000000002, + "loss": 4.6718, + "step": 763 + }, + { + "epoch": 0.00764, + "grad_norm": 0.6388540284979518, + "learning_rate": 0.002292, + "loss": 4.683, + "step": 764 + }, + { + "epoch": 0.00765, + "grad_norm": 0.7278589788698283, + "learning_rate": 0.002295, + "loss": 4.6752, + "step": 765 + }, + { + "epoch": 0.00766, + "grad_norm": 0.7050667087459527, + "learning_rate": 0.002298, + "loss": 4.6715, + "step": 766 + }, + { + "epoch": 0.00767, + "grad_norm": 0.6023307971425665, + "learning_rate": 0.002301, + "loss": 4.6623, + "step": 767 + }, + { + "epoch": 0.00768, + "grad_norm": 0.7162182495110988, + "learning_rate": 0.002304, + "loss": 4.6687, + "step": 768 + }, + { + "epoch": 0.00769, + "grad_norm": 0.8754398583131885, + "learning_rate": 0.002307, + "loss": 4.6855, + "step": 769 + }, + { + "epoch": 0.0077, + "grad_norm": 0.9282950433205286, + "learning_rate": 0.00231, + "loss": 4.656, + "step": 770 + }, + { + "epoch": 0.00771, + "grad_norm": 0.8826983762517153, + "learning_rate": 0.002313, + "loss": 4.6927, + "step": 771 + }, + { + "epoch": 0.00772, + "grad_norm": 0.7955428299875815, + "learning_rate": 0.002316, + "loss": 4.6752, + "step": 772 + }, + { + "epoch": 0.00773, + "grad_norm": 0.6879017191297421, + 
"learning_rate": 0.0023190000000000003, + "loss": 4.6732, + "step": 773 + }, + { + "epoch": 0.00774, + "grad_norm": 0.5805904836674535, + "learning_rate": 0.0023220000000000003, + "loss": 4.6842, + "step": 774 + }, + { + "epoch": 0.00775, + "grad_norm": 0.5872309146101224, + "learning_rate": 0.0023250000000000002, + "loss": 4.6741, + "step": 775 + }, + { + "epoch": 0.00776, + "grad_norm": 0.6663800275805344, + "learning_rate": 0.002328, + "loss": 4.6762, + "step": 776 + }, + { + "epoch": 0.00777, + "grad_norm": 0.6475349218207965, + "learning_rate": 0.002331, + "loss": 4.6499, + "step": 777 + }, + { + "epoch": 0.00778, + "grad_norm": 0.6498373909650491, + "learning_rate": 0.002334, + "loss": 4.6643, + "step": 778 + }, + { + "epoch": 0.00779, + "grad_norm": 0.6357690952406082, + "learning_rate": 0.002337, + "loss": 4.6181, + "step": 779 + }, + { + "epoch": 0.0078, + "grad_norm": 0.6241849680287349, + "learning_rate": 0.00234, + "loss": 4.6419, + "step": 780 + }, + { + "epoch": 0.00781, + "grad_norm": 0.6808062655697796, + "learning_rate": 0.002343, + "loss": 4.6451, + "step": 781 + }, + { + "epoch": 0.00782, + "grad_norm": 0.7065267585011001, + "learning_rate": 0.002346, + "loss": 4.6436, + "step": 782 + }, + { + "epoch": 0.00783, + "grad_norm": 0.6381701638777938, + "learning_rate": 0.002349, + "loss": 4.6242, + "step": 783 + }, + { + "epoch": 0.00784, + "grad_norm": 0.647841330234918, + "learning_rate": 0.002352, + "loss": 4.6355, + "step": 784 + }, + { + "epoch": 0.00785, + "grad_norm": 0.60562299847623, + "learning_rate": 0.0023550000000000003, + "loss": 4.6345, + "step": 785 + }, + { + "epoch": 0.00786, + "grad_norm": 0.5148036449557751, + "learning_rate": 0.0023580000000000003, + "loss": 4.628, + "step": 786 + }, + { + "epoch": 0.00787, + "grad_norm": 0.7217903580979332, + "learning_rate": 0.0023610000000000003, + "loss": 4.641, + "step": 787 + }, + { + "epoch": 0.00788, + "grad_norm": 0.8515178253715305, + "learning_rate": 0.002364, + "loss": 4.6466, + "step": 788 + }, + { + "epoch": 0.00789, + "grad_norm": 0.8831947515557061, + "learning_rate": 0.002367, + "loss": 4.6753, + "step": 789 + }, + { + "epoch": 0.0079, + "grad_norm": 0.9816312249435458, + "learning_rate": 0.00237, + "loss": 4.6574, + "step": 790 + }, + { + "epoch": 0.00791, + "grad_norm": 0.8257428278422617, + "learning_rate": 0.002373, + "loss": 4.6081, + "step": 791 + }, + { + "epoch": 0.00792, + "grad_norm": 0.6889392893349975, + "learning_rate": 0.002376, + "loss": 4.6372, + "step": 792 + }, + { + "epoch": 0.00793, + "grad_norm": 0.5470340397913868, + "learning_rate": 0.002379, + "loss": 4.6272, + "step": 793 + }, + { + "epoch": 0.00794, + "grad_norm": 0.5333909794818302, + "learning_rate": 0.002382, + "loss": 4.6359, + "step": 794 + }, + { + "epoch": 0.00795, + "grad_norm": 0.47330116805918854, + "learning_rate": 0.002385, + "loss": 4.6105, + "step": 795 + }, + { + "epoch": 0.00796, + "grad_norm": 0.46996625387544017, + "learning_rate": 0.0023880000000000004, + "loss": 4.6049, + "step": 796 + }, + { + "epoch": 0.00797, + "grad_norm": 0.4648603328586337, + "learning_rate": 0.0023910000000000003, + "loss": 4.6461, + "step": 797 + }, + { + "epoch": 0.00798, + "grad_norm": 0.48147045136320854, + "learning_rate": 0.0023940000000000003, + "loss": 4.6125, + "step": 798 + }, + { + "epoch": 0.00799, + "grad_norm": 0.5520002928611956, + "learning_rate": 0.0023970000000000003, + "loss": 4.6461, + "step": 799 + }, + { + "epoch": 0.008, + "grad_norm": 0.49520453644625784, + "learning_rate": 0.0024000000000000002, + "loss": 
4.5958, + "step": 800 + }, + { + "epoch": 0.00801, + "grad_norm": 0.4617883447215667, + "learning_rate": 0.002403, + "loss": 4.6244, + "step": 801 + }, + { + "epoch": 0.00802, + "grad_norm": 0.6171622349156032, + "learning_rate": 0.002406, + "loss": 4.6206, + "step": 802 + }, + { + "epoch": 0.00803, + "grad_norm": 0.7819651777797723, + "learning_rate": 0.002409, + "loss": 4.605, + "step": 803 + }, + { + "epoch": 0.00804, + "grad_norm": 0.8072878053268496, + "learning_rate": 0.002412, + "loss": 4.6195, + "step": 804 + }, + { + "epoch": 0.00805, + "grad_norm": 0.6878812715646375, + "learning_rate": 0.002415, + "loss": 4.6081, + "step": 805 + }, + { + "epoch": 0.00806, + "grad_norm": 0.7628005366591507, + "learning_rate": 0.002418, + "loss": 4.6308, + "step": 806 + }, + { + "epoch": 0.00807, + "grad_norm": 0.7958527408861041, + "learning_rate": 0.0024210000000000004, + "loss": 4.6228, + "step": 807 + }, + { + "epoch": 0.00808, + "grad_norm": 0.7899389450719584, + "learning_rate": 0.0024240000000000004, + "loss": 4.6083, + "step": 808 + }, + { + "epoch": 0.00809, + "grad_norm": 1.193539574978161, + "learning_rate": 0.0024270000000000003, + "loss": 4.6201, + "step": 809 + }, + { + "epoch": 0.0081, + "grad_norm": 1.0067442084818319, + "learning_rate": 0.0024300000000000003, + "loss": 4.6554, + "step": 810 + }, + { + "epoch": 0.00811, + "grad_norm": 1.0302946243785736, + "learning_rate": 0.0024330000000000003, + "loss": 4.6338, + "step": 811 + }, + { + "epoch": 0.00812, + "grad_norm": 0.7966936317025509, + "learning_rate": 0.0024360000000000002, + "loss": 4.6044, + "step": 812 + }, + { + "epoch": 0.00813, + "grad_norm": 0.7843512654561826, + "learning_rate": 0.0024389999999999998, + "loss": 4.6449, + "step": 813 + }, + { + "epoch": 0.00814, + "grad_norm": 0.7981724618057067, + "learning_rate": 0.0024419999999999997, + "loss": 4.6482, + "step": 814 + }, + { + "epoch": 0.00815, + "grad_norm": 0.8382113380987876, + "learning_rate": 0.0024449999999999997, + "loss": 4.6588, + "step": 815 + }, + { + "epoch": 0.00816, + "grad_norm": 0.8204581966398267, + "learning_rate": 0.002448, + "loss": 4.6311, + "step": 816 + }, + { + "epoch": 0.00817, + "grad_norm": 1.02336938073518, + "learning_rate": 0.002451, + "loss": 4.6583, + "step": 817 + }, + { + "epoch": 0.00818, + "grad_norm": 0.815478540285485, + "learning_rate": 0.002454, + "loss": 4.6309, + "step": 818 + }, + { + "epoch": 0.00819, + "grad_norm": 0.9108711148393207, + "learning_rate": 0.002457, + "loss": 4.6483, + "step": 819 + }, + { + "epoch": 0.0082, + "grad_norm": 0.8773364113378127, + "learning_rate": 0.00246, + "loss": 4.6373, + "step": 820 + }, + { + "epoch": 0.00821, + "grad_norm": 0.8093045902813614, + "learning_rate": 0.002463, + "loss": 4.6126, + "step": 821 + }, + { + "epoch": 0.00822, + "grad_norm": 0.8775561270633004, + "learning_rate": 0.002466, + "loss": 4.6164, + "step": 822 + }, + { + "epoch": 0.00823, + "grad_norm": 0.8916321669966187, + "learning_rate": 0.002469, + "loss": 4.6298, + "step": 823 + }, + { + "epoch": 0.00824, + "grad_norm": 0.7939051533904264, + "learning_rate": 0.002472, + "loss": 4.6511, + "step": 824 + }, + { + "epoch": 0.00825, + "grad_norm": 0.9509206049767348, + "learning_rate": 0.0024749999999999998, + "loss": 4.6625, + "step": 825 + }, + { + "epoch": 0.00826, + "grad_norm": 1.0914612729506281, + "learning_rate": 0.0024779999999999997, + "loss": 4.6356, + "step": 826 + }, + { + "epoch": 0.00827, + "grad_norm": 1.0440371247088225, + "learning_rate": 0.002481, + "loss": 4.6709, + "step": 827 + }, + { + "epoch": 
0.00828, + "grad_norm": 1.0292672719671891, + "learning_rate": 0.002484, + "loss": 4.6644, + "step": 828 + }, + { + "epoch": 0.00829, + "grad_norm": 1.002549149572049, + "learning_rate": 0.002487, + "loss": 4.6608, + "step": 829 + }, + { + "epoch": 0.0083, + "grad_norm": 0.8229445260626227, + "learning_rate": 0.00249, + "loss": 4.6642, + "step": 830 + }, + { + "epoch": 0.00831, + "grad_norm": 0.7223475739297199, + "learning_rate": 0.002493, + "loss": 4.6379, + "step": 831 + }, + { + "epoch": 0.00832, + "grad_norm": 0.6422365780456449, + "learning_rate": 0.002496, + "loss": 4.6349, + "step": 832 + }, + { + "epoch": 0.00833, + "grad_norm": 0.6433276699815419, + "learning_rate": 0.002499, + "loss": 4.626, + "step": 833 + }, + { + "epoch": 0.00834, + "grad_norm": 0.6136587911860008, + "learning_rate": 0.002502, + "loss": 4.6284, + "step": 834 + }, + { + "epoch": 0.00835, + "grad_norm": 0.5892258230535582, + "learning_rate": 0.002505, + "loss": 4.617, + "step": 835 + }, + { + "epoch": 0.00836, + "grad_norm": 0.5407454758774727, + "learning_rate": 0.002508, + "loss": 4.6416, + "step": 836 + }, + { + "epoch": 0.00837, + "grad_norm": 0.5840603338652609, + "learning_rate": 0.0025109999999999998, + "loss": 4.582, + "step": 837 + }, + { + "epoch": 0.00838, + "grad_norm": 0.5192725472759927, + "learning_rate": 0.0025139999999999997, + "loss": 4.6102, + "step": 838 + }, + { + "epoch": 0.00839, + "grad_norm": 0.5064380785759203, + "learning_rate": 0.002517, + "loss": 4.6034, + "step": 839 + }, + { + "epoch": 0.0084, + "grad_norm": 0.48476724687493267, + "learning_rate": 0.00252, + "loss": 4.5854, + "step": 840 + }, + { + "epoch": 0.00841, + "grad_norm": 0.44496684540968734, + "learning_rate": 0.002523, + "loss": 4.5786, + "step": 841 + }, + { + "epoch": 0.00842, + "grad_norm": 0.42609007840223895, + "learning_rate": 0.002526, + "loss": 4.5771, + "step": 842 + }, + { + "epoch": 0.00843, + "grad_norm": 0.4703393687667864, + "learning_rate": 0.002529, + "loss": 4.5751, + "step": 843 + }, + { + "epoch": 0.00844, + "grad_norm": 0.4976619641323943, + "learning_rate": 0.002532, + "loss": 4.5956, + "step": 844 + }, + { + "epoch": 0.00845, + "grad_norm": 0.530881004052944, + "learning_rate": 0.002535, + "loss": 4.5625, + "step": 845 + }, + { + "epoch": 0.00846, + "grad_norm": 0.5069253535552343, + "learning_rate": 0.002538, + "loss": 4.584, + "step": 846 + }, + { + "epoch": 0.00847, + "grad_norm": 0.4900054637856495, + "learning_rate": 0.002541, + "loss": 4.5637, + "step": 847 + }, + { + "epoch": 0.00848, + "grad_norm": 0.5361511355183629, + "learning_rate": 0.002544, + "loss": 4.5693, + "step": 848 + }, + { + "epoch": 0.00849, + "grad_norm": 0.6067359238432654, + "learning_rate": 0.002547, + "loss": 4.5644, + "step": 849 + }, + { + "epoch": 0.0085, + "grad_norm": 0.5519192846207763, + "learning_rate": 0.00255, + "loss": 4.573, + "step": 850 + }, + { + "epoch": 0.00851, + "grad_norm": 0.46694877598438245, + "learning_rate": 0.002553, + "loss": 4.5875, + "step": 851 + }, + { + "epoch": 0.00852, + "grad_norm": 0.477565098915178, + "learning_rate": 0.002556, + "loss": 4.5765, + "step": 852 + }, + { + "epoch": 0.00853, + "grad_norm": 0.5020213435824815, + "learning_rate": 0.002559, + "loss": 4.5556, + "step": 853 + }, + { + "epoch": 0.00854, + "grad_norm": 0.5171409161048013, + "learning_rate": 0.002562, + "loss": 4.5495, + "step": 854 + }, + { + "epoch": 0.00855, + "grad_norm": 0.46627459343076927, + "learning_rate": 0.002565, + "loss": 4.5252, + "step": 855 + }, + { + "epoch": 0.00856, + "grad_norm": 
0.5139521756940325, + "learning_rate": 0.002568, + "loss": 4.5623, + "step": 856 + }, + { + "epoch": 0.00857, + "grad_norm": 0.6011403998041547, + "learning_rate": 0.002571, + "loss": 4.5577, + "step": 857 + }, + { + "epoch": 0.00858, + "grad_norm": 0.554768384377006, + "learning_rate": 0.002574, + "loss": 4.5487, + "step": 858 + }, + { + "epoch": 0.00859, + "grad_norm": 0.539858542755145, + "learning_rate": 0.002577, + "loss": 4.5383, + "step": 859 + }, + { + "epoch": 0.0086, + "grad_norm": 0.586599307397693, + "learning_rate": 0.00258, + "loss": 4.571, + "step": 860 + }, + { + "epoch": 0.00861, + "grad_norm": 0.7154666400015554, + "learning_rate": 0.0025830000000000002, + "loss": 4.5928, + "step": 861 + }, + { + "epoch": 0.00862, + "grad_norm": 0.7345971792604707, + "learning_rate": 0.002586, + "loss": 4.5402, + "step": 862 + }, + { + "epoch": 0.00863, + "grad_norm": 0.7491760821516434, + "learning_rate": 0.002589, + "loss": 4.5765, + "step": 863 + }, + { + "epoch": 0.00864, + "grad_norm": 0.9255705361922033, + "learning_rate": 0.002592, + "loss": 4.5322, + "step": 864 + }, + { + "epoch": 0.00865, + "grad_norm": 0.9964986146275199, + "learning_rate": 0.002595, + "loss": 4.5664, + "step": 865 + }, + { + "epoch": 0.00866, + "grad_norm": 0.7618488122087141, + "learning_rate": 0.002598, + "loss": 4.5836, + "step": 866 + }, + { + "epoch": 0.00867, + "grad_norm": 0.8524780083566116, + "learning_rate": 0.002601, + "loss": 4.5742, + "step": 867 + }, + { + "epoch": 0.00868, + "grad_norm": 0.9692981312410378, + "learning_rate": 0.002604, + "loss": 4.5808, + "step": 868 + }, + { + "epoch": 0.00869, + "grad_norm": 1.1822252043975705, + "learning_rate": 0.002607, + "loss": 4.6073, + "step": 869 + }, + { + "epoch": 0.0087, + "grad_norm": 0.9057663759386707, + "learning_rate": 0.00261, + "loss": 4.5844, + "step": 870 + }, + { + "epoch": 0.00871, + "grad_norm": 0.9457338978675252, + "learning_rate": 0.002613, + "loss": 4.6115, + "step": 871 + }, + { + "epoch": 0.00872, + "grad_norm": 0.9845348105394848, + "learning_rate": 0.002616, + "loss": 4.5975, + "step": 872 + }, + { + "epoch": 0.00873, + "grad_norm": 0.8202179076205192, + "learning_rate": 0.0026190000000000002, + "loss": 4.5967, + "step": 873 + }, + { + "epoch": 0.00874, + "grad_norm": 0.6587988147688274, + "learning_rate": 0.002622, + "loss": 4.6142, + "step": 874 + }, + { + "epoch": 0.00875, + "grad_norm": 0.6312495265838277, + "learning_rate": 0.002625, + "loss": 4.5549, + "step": 875 + }, + { + "epoch": 0.00876, + "grad_norm": 0.6646817876274769, + "learning_rate": 0.002628, + "loss": 4.5838, + "step": 876 + }, + { + "epoch": 0.00877, + "grad_norm": 0.632354886157607, + "learning_rate": 0.002631, + "loss": 4.6011, + "step": 877 + }, + { + "epoch": 0.00878, + "grad_norm": 0.5630676163174572, + "learning_rate": 0.002634, + "loss": 4.5288, + "step": 878 + }, + { + "epoch": 0.00879, + "grad_norm": 0.4918531988998375, + "learning_rate": 0.002637, + "loss": 4.5559, + "step": 879 + }, + { + "epoch": 0.0088, + "grad_norm": 0.42476181684324305, + "learning_rate": 0.00264, + "loss": 4.5634, + "step": 880 + }, + { + "epoch": 0.00881, + "grad_norm": 0.4573573466722849, + "learning_rate": 0.002643, + "loss": 4.5644, + "step": 881 + }, + { + "epoch": 0.00882, + "grad_norm": 0.5881448674370812, + "learning_rate": 0.002646, + "loss": 4.5659, + "step": 882 + }, + { + "epoch": 0.00883, + "grad_norm": 0.7764456560266775, + "learning_rate": 0.002649, + "loss": 4.5645, + "step": 883 + }, + { + "epoch": 0.00884, + "grad_norm": 0.9651176541039754, + "learning_rate": 
0.0026520000000000003, + "loss": 4.5629, + "step": 884 + }, + { + "epoch": 0.00885, + "grad_norm": 1.0453630458113787, + "learning_rate": 0.0026550000000000002, + "loss": 4.5675, + "step": 885 + }, + { + "epoch": 0.00886, + "grad_norm": 0.8249295551343052, + "learning_rate": 0.002658, + "loss": 4.5811, + "step": 886 + }, + { + "epoch": 0.00887, + "grad_norm": 0.6632170677276661, + "learning_rate": 0.002661, + "loss": 4.5643, + "step": 887 + }, + { + "epoch": 0.00888, + "grad_norm": 0.7818922566742896, + "learning_rate": 0.002664, + "loss": 4.5709, + "step": 888 + }, + { + "epoch": 0.00889, + "grad_norm": 0.8131057291041344, + "learning_rate": 0.002667, + "loss": 4.5276, + "step": 889 + }, + { + "epoch": 0.0089, + "grad_norm": 0.7364786352062309, + "learning_rate": 0.00267, + "loss": 4.5735, + "step": 890 + }, + { + "epoch": 0.00891, + "grad_norm": 0.6174568923006037, + "learning_rate": 0.002673, + "loss": 4.5538, + "step": 891 + }, + { + "epoch": 0.00892, + "grad_norm": 0.6060396654742667, + "learning_rate": 0.002676, + "loss": 4.5678, + "step": 892 + }, + { + "epoch": 0.00893, + "grad_norm": 0.6503337239639668, + "learning_rate": 0.002679, + "loss": 4.5453, + "step": 893 + }, + { + "epoch": 0.00894, + "grad_norm": 0.6599395002026207, + "learning_rate": 0.002682, + "loss": 4.5291, + "step": 894 + }, + { + "epoch": 0.00895, + "grad_norm": 0.5989877186645693, + "learning_rate": 0.0026850000000000003, + "loss": 4.5412, + "step": 895 + }, + { + "epoch": 0.00896, + "grad_norm": 0.5286031214975206, + "learning_rate": 0.0026880000000000003, + "loss": 4.5273, + "step": 896 + }, + { + "epoch": 0.00897, + "grad_norm": 0.6246596167729576, + "learning_rate": 0.0026910000000000002, + "loss": 4.5504, + "step": 897 + }, + { + "epoch": 0.00898, + "grad_norm": 0.6886920087577523, + "learning_rate": 0.002694, + "loss": 4.5437, + "step": 898 + }, + { + "epoch": 0.00899, + "grad_norm": 0.7603324493631337, + "learning_rate": 0.002697, + "loss": 4.5543, + "step": 899 + }, + { + "epoch": 0.009, + "grad_norm": 0.7773743953648492, + "learning_rate": 0.0027, + "loss": 4.5794, + "step": 900 + }, + { + "epoch": 0.00901, + "grad_norm": 0.683256197441996, + "learning_rate": 0.002703, + "loss": 4.5307, + "step": 901 + }, + { + "epoch": 0.00902, + "grad_norm": 0.5681357763332335, + "learning_rate": 0.002706, + "loss": 4.5356, + "step": 902 + }, + { + "epoch": 0.00903, + "grad_norm": 0.5420591540444755, + "learning_rate": 0.002709, + "loss": 4.5338, + "step": 903 + }, + { + "epoch": 0.00904, + "grad_norm": 0.5224631659490503, + "learning_rate": 0.002712, + "loss": 4.5093, + "step": 904 + }, + { + "epoch": 0.00905, + "grad_norm": 0.5026034590467293, + "learning_rate": 0.002715, + "loss": 4.5252, + "step": 905 + }, + { + "epoch": 0.00906, + "grad_norm": 0.5177890071237494, + "learning_rate": 0.002718, + "loss": 4.5378, + "step": 906 + }, + { + "epoch": 0.00907, + "grad_norm": 0.5764689015080159, + "learning_rate": 0.0027210000000000003, + "loss": 4.5536, + "step": 907 + }, + { + "epoch": 0.00908, + "grad_norm": 0.6259624722487185, + "learning_rate": 0.0027240000000000003, + "loss": 4.5265, + "step": 908 + }, + { + "epoch": 0.00909, + "grad_norm": 0.670091172363038, + "learning_rate": 0.0027270000000000003, + "loss": 4.5481, + "step": 909 + }, + { + "epoch": 0.0091, + "grad_norm": 0.7211417475777565, + "learning_rate": 0.0027300000000000002, + "loss": 4.555, + "step": 910 + }, + { + "epoch": 0.00911, + "grad_norm": 0.6734826041799787, + "learning_rate": 0.002733, + "loss": 4.5118, + "step": 911 + }, + { + "epoch": 0.00912, + 
"grad_norm": 0.5721394003951694, + "learning_rate": 0.002736, + "loss": 4.5134, + "step": 912 + }, + { + "epoch": 0.00913, + "grad_norm": 0.5576363751689392, + "learning_rate": 0.002739, + "loss": 4.5062, + "step": 913 + }, + { + "epoch": 0.00914, + "grad_norm": 0.659556953854551, + "learning_rate": 0.002742, + "loss": 4.5426, + "step": 914 + }, + { + "epoch": 0.00915, + "grad_norm": 0.7056386423863461, + "learning_rate": 0.002745, + "loss": 4.5041, + "step": 915 + }, + { + "epoch": 0.00916, + "grad_norm": 0.6615396800381155, + "learning_rate": 0.002748, + "loss": 4.4829, + "step": 916 + }, + { + "epoch": 0.00917, + "grad_norm": 0.6203895946828626, + "learning_rate": 0.002751, + "loss": 4.4952, + "step": 917 + }, + { + "epoch": 0.00918, + "grad_norm": 0.6455523293432982, + "learning_rate": 0.0027540000000000004, + "loss": 4.5159, + "step": 918 + }, + { + "epoch": 0.00919, + "grad_norm": 0.6153975854015812, + "learning_rate": 0.0027570000000000003, + "loss": 4.4956, + "step": 919 + }, + { + "epoch": 0.0092, + "grad_norm": 0.6747074236078813, + "learning_rate": 0.0027600000000000003, + "loss": 4.516, + "step": 920 + }, + { + "epoch": 0.00921, + "grad_norm": 0.7525938258933302, + "learning_rate": 0.0027630000000000003, + "loss": 4.5035, + "step": 921 + }, + { + "epoch": 0.00922, + "grad_norm": 0.6613679668687953, + "learning_rate": 0.0027660000000000002, + "loss": 4.5161, + "step": 922 + }, + { + "epoch": 0.00923, + "grad_norm": 0.657751627761755, + "learning_rate": 0.002769, + "loss": 4.4882, + "step": 923 + }, + { + "epoch": 0.00924, + "grad_norm": 0.6747361910557791, + "learning_rate": 0.002772, + "loss": 4.515, + "step": 924 + }, + { + "epoch": 0.00925, + "grad_norm": 0.6239646474738244, + "learning_rate": 0.002775, + "loss": 4.4929, + "step": 925 + }, + { + "epoch": 0.00926, + "grad_norm": 0.599067117804374, + "learning_rate": 0.002778, + "loss": 4.5097, + "step": 926 + }, + { + "epoch": 0.00927, + "grad_norm": 0.5594951339370652, + "learning_rate": 0.002781, + "loss": 4.4719, + "step": 927 + }, + { + "epoch": 0.00928, + "grad_norm": 0.6063845847981192, + "learning_rate": 0.002784, + "loss": 4.518, + "step": 928 + }, + { + "epoch": 0.00929, + "grad_norm": 0.6363503981385901, + "learning_rate": 0.0027870000000000004, + "loss": 4.5031, + "step": 929 + }, + { + "epoch": 0.0093, + "grad_norm": 0.7396258221206569, + "learning_rate": 0.0027900000000000004, + "loss": 4.4944, + "step": 930 + }, + { + "epoch": 0.00931, + "grad_norm": 0.8942550404249334, + "learning_rate": 0.0027930000000000003, + "loss": 4.517, + "step": 931 + }, + { + "epoch": 0.00932, + "grad_norm": 1.0354660733966428, + "learning_rate": 0.0027960000000000003, + "loss": 4.5402, + "step": 932 + }, + { + "epoch": 0.00933, + "grad_norm": 1.180367237398422, + "learning_rate": 0.0027990000000000003, + "loss": 4.525, + "step": 933 + }, + { + "epoch": 0.00934, + "grad_norm": 0.9502642927196222, + "learning_rate": 0.0028020000000000002, + "loss": 4.5541, + "step": 934 + }, + { + "epoch": 0.00935, + "grad_norm": 0.8858808588378486, + "learning_rate": 0.002805, + "loss": 4.5601, + "step": 935 + }, + { + "epoch": 0.00936, + "grad_norm": 0.9244572615029755, + "learning_rate": 0.002808, + "loss": 4.569, + "step": 936 + }, + { + "epoch": 0.00937, + "grad_norm": 0.9225067704838915, + "learning_rate": 0.002811, + "loss": 4.5806, + "step": 937 + }, + { + "epoch": 0.00938, + "grad_norm": 0.8534895885659719, + "learning_rate": 0.002814, + "loss": 4.5604, + "step": 938 + }, + { + "epoch": 0.00939, + "grad_norm": 0.9046137087836131, + 
"learning_rate": 0.002817, + "loss": 4.5554, + "step": 939 + }, + { + "epoch": 0.0094, + "grad_norm": 0.7476401969651744, + "learning_rate": 0.00282, + "loss": 4.5482, + "step": 940 + }, + { + "epoch": 0.00941, + "grad_norm": 0.7066455268972154, + "learning_rate": 0.002823, + "loss": 4.5225, + "step": 941 + }, + { + "epoch": 0.00942, + "grad_norm": 0.6459119835251312, + "learning_rate": 0.002826, + "loss": 4.544, + "step": 942 + }, + { + "epoch": 0.00943, + "grad_norm": 0.6055625869260791, + "learning_rate": 0.002829, + "loss": 4.5363, + "step": 943 + }, + { + "epoch": 0.00944, + "grad_norm": 0.5293280202161804, + "learning_rate": 0.002832, + "loss": 4.5307, + "step": 944 + }, + { + "epoch": 0.00945, + "grad_norm": 0.5438831110130244, + "learning_rate": 0.002835, + "loss": 4.5334, + "step": 945 + }, + { + "epoch": 0.00946, + "grad_norm": 0.4766420743521973, + "learning_rate": 0.002838, + "loss": 4.5433, + "step": 946 + }, + { + "epoch": 0.00947, + "grad_norm": 0.40195567884756706, + "learning_rate": 0.0028409999999999998, + "loss": 4.5081, + "step": 947 + }, + { + "epoch": 0.00948, + "grad_norm": 0.3783844921089427, + "learning_rate": 0.0028439999999999997, + "loss": 4.4865, + "step": 948 + }, + { + "epoch": 0.00949, + "grad_norm": 0.4197576569837563, + "learning_rate": 0.002847, + "loss": 4.5306, + "step": 949 + }, + { + "epoch": 0.0095, + "grad_norm": 0.45947372263331304, + "learning_rate": 0.00285, + "loss": 4.5193, + "step": 950 + }, + { + "epoch": 0.00951, + "grad_norm": 0.5187245758366383, + "learning_rate": 0.002853, + "loss": 4.4969, + "step": 951 + }, + { + "epoch": 0.00952, + "grad_norm": 0.5515692080168162, + "learning_rate": 0.002856, + "loss": 4.5218, + "step": 952 + }, + { + "epoch": 0.00953, + "grad_norm": 0.501582875002041, + "learning_rate": 0.002859, + "loss": 4.4606, + "step": 953 + }, + { + "epoch": 0.00954, + "grad_norm": 0.5014106294436917, + "learning_rate": 0.002862, + "loss": 4.5197, + "step": 954 + }, + { + "epoch": 0.00955, + "grad_norm": 0.6047606934565909, + "learning_rate": 0.002865, + "loss": 4.5086, + "step": 955 + }, + { + "epoch": 0.00956, + "grad_norm": 0.6661868633369662, + "learning_rate": 0.002868, + "loss": 4.4921, + "step": 956 + }, + { + "epoch": 0.00957, + "grad_norm": 0.6511713371124522, + "learning_rate": 0.002871, + "loss": 4.514, + "step": 957 + }, + { + "epoch": 0.00958, + "grad_norm": 0.5733443203887492, + "learning_rate": 0.002874, + "loss": 4.4931, + "step": 958 + }, + { + "epoch": 0.00959, + "grad_norm": 0.6024952806359369, + "learning_rate": 0.002877, + "loss": 4.4895, + "step": 959 + }, + { + "epoch": 0.0096, + "grad_norm": 0.6029559818977924, + "learning_rate": 0.0028799999999999997, + "loss": 4.4868, + "step": 960 + }, + { + "epoch": 0.00961, + "grad_norm": 0.5721073369283843, + "learning_rate": 0.002883, + "loss": 4.4604, + "step": 961 + }, + { + "epoch": 0.00962, + "grad_norm": 0.5737900491823522, + "learning_rate": 0.002886, + "loss": 4.4898, + "step": 962 + }, + { + "epoch": 0.00963, + "grad_norm": 0.5323481251626608, + "learning_rate": 0.002889, + "loss": 4.4867, + "step": 963 + }, + { + "epoch": 0.00964, + "grad_norm": 0.5436801325002781, + "learning_rate": 0.002892, + "loss": 4.4807, + "step": 964 + }, + { + "epoch": 0.00965, + "grad_norm": 0.60229729083351, + "learning_rate": 0.002895, + "loss": 4.4568, + "step": 965 + }, + { + "epoch": 0.00966, + "grad_norm": 0.6629818387101306, + "learning_rate": 0.002898, + "loss": 4.4766, + "step": 966 + }, + { + "epoch": 0.00967, + "grad_norm": 0.6748155978904155, + "learning_rate": 
0.002901, + "loss": 4.5156, + "step": 967 + }, + { + "epoch": 0.00968, + "grad_norm": 0.7427494012599226, + "learning_rate": 0.002904, + "loss": 4.4866, + "step": 968 + }, + { + "epoch": 0.00969, + "grad_norm": 0.8794961931178971, + "learning_rate": 0.002907, + "loss": 4.5273, + "step": 969 + }, + { + "epoch": 0.0097, + "grad_norm": 0.8586008854691127, + "learning_rate": 0.00291, + "loss": 4.4896, + "step": 970 + }, + { + "epoch": 0.00971, + "grad_norm": 0.8273563438543869, + "learning_rate": 0.002913, + "loss": 4.4955, + "step": 971 + }, + { + "epoch": 0.00972, + "grad_norm": 0.7536097688784559, + "learning_rate": 0.002916, + "loss": 4.5029, + "step": 972 + }, + { + "epoch": 0.00973, + "grad_norm": 0.7541845251322323, + "learning_rate": 0.002919, + "loss": 4.4985, + "step": 973 + }, + { + "epoch": 0.00974, + "grad_norm": 0.6473607436694337, + "learning_rate": 0.002922, + "loss": 4.4796, + "step": 974 + }, + { + "epoch": 0.00975, + "grad_norm": 0.7361402706574074, + "learning_rate": 0.002925, + "loss": 4.5044, + "step": 975 + }, + { + "epoch": 0.00976, + "grad_norm": 0.868936228763895, + "learning_rate": 0.002928, + "loss": 4.508, + "step": 976 + }, + { + "epoch": 0.00977, + "grad_norm": 0.8813019375073942, + "learning_rate": 0.002931, + "loss": 4.5408, + "step": 977 + }, + { + "epoch": 0.00978, + "grad_norm": 0.9426880168937273, + "learning_rate": 0.002934, + "loss": 4.5239, + "step": 978 + }, + { + "epoch": 0.00979, + "grad_norm": 0.8776049562434768, + "learning_rate": 0.002937, + "loss": 4.5177, + "step": 979 + }, + { + "epoch": 0.0098, + "grad_norm": 0.7621714209410982, + "learning_rate": 0.00294, + "loss": 4.5005, + "step": 980 + }, + { + "epoch": 0.00981, + "grad_norm": 0.7607321859563556, + "learning_rate": 0.002943, + "loss": 4.5183, + "step": 981 + }, + { + "epoch": 0.00982, + "grad_norm": 0.8148690145722087, + "learning_rate": 0.002946, + "loss": 4.5101, + "step": 982 + }, + { + "epoch": 0.00983, + "grad_norm": 0.8602879224908239, + "learning_rate": 0.0029490000000000002, + "loss": 4.5186, + "step": 983 + }, + { + "epoch": 0.00984, + "grad_norm": 0.9348586711938943, + "learning_rate": 0.002952, + "loss": 4.5105, + "step": 984 + }, + { + "epoch": 0.00985, + "grad_norm": 0.9010607870226212, + "learning_rate": 0.002955, + "loss": 4.5341, + "step": 985 + }, + { + "epoch": 0.00986, + "grad_norm": 0.8225410280635316, + "learning_rate": 0.002958, + "loss": 4.497, + "step": 986 + }, + { + "epoch": 0.00987, + "grad_norm": 0.820560994458863, + "learning_rate": 0.002961, + "loss": 4.5111, + "step": 987 + }, + { + "epoch": 0.00988, + "grad_norm": 0.7430257271274537, + "learning_rate": 0.002964, + "loss": 4.5437, + "step": 988 + }, + { + "epoch": 0.00989, + "grad_norm": 0.7193873820034543, + "learning_rate": 0.002967, + "loss": 4.4838, + "step": 989 + }, + { + "epoch": 0.0099, + "grad_norm": 0.7329220852773792, + "learning_rate": 0.00297, + "loss": 4.5219, + "step": 990 + }, + { + "epoch": 0.00991, + "grad_norm": 0.7694030006138932, + "learning_rate": 0.002973, + "loss": 4.5213, + "step": 991 + }, + { + "epoch": 0.00992, + "grad_norm": 0.7726534251991994, + "learning_rate": 0.002976, + "loss": 4.5153, + "step": 992 + }, + { + "epoch": 0.00993, + "grad_norm": 0.6817474065224322, + "learning_rate": 0.002979, + "loss": 4.511, + "step": 993 + }, + { + "epoch": 0.00994, + "grad_norm": 0.6628189744120299, + "learning_rate": 0.002982, + "loss": 4.5078, + "step": 994 + }, + { + "epoch": 0.00995, + "grad_norm": 0.6249124245549155, + "learning_rate": 0.0029850000000000002, + "loss": 4.5069, + "step": 
995 + }, + { + "epoch": 0.00996, + "grad_norm": 0.54278083452404, + "learning_rate": 0.002988, + "loss": 4.503, + "step": 996 + }, + { + "epoch": 0.00997, + "grad_norm": 0.5131542547273349, + "learning_rate": 0.002991, + "loss": 4.49, + "step": 997 + }, + { + "epoch": 0.00998, + "grad_norm": 0.4760166868407609, + "learning_rate": 0.002994, + "loss": 4.4895, + "step": 998 + }, + { + "epoch": 0.00999, + "grad_norm": 0.4855483581267517, + "learning_rate": 0.002997, + "loss": 4.4707, + "step": 999 + }, + { + "epoch": 0.01, + "grad_norm": 0.44929338024832627, + "learning_rate": 0.003, + "loss": 4.4897, + "step": 1000 + } + ], + "logging_steps": 1, + "max_steps": 100000, + "num_input_tokens_seen": 0, + "num_train_epochs": 1, + "save_steps": 1000, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 3.9643642855424e+16, + "train_batch_size": 1024, + "trial_name": null, + "trial_params": null +}
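
Note (not part of the checkpoint file above): the state ends at global_step 1000 of a 100000-step run, saved every 1000 steps with per-step logging. A minimal sketch of how one might load this file and summarize the logged loss curve, assuming the path from the diff header (checkpoint-1000/trainer_state.json) and only the keys that appear in log_history:

    # sketch: summarize the loss trajectory recorded in this trainer_state.json
    import json

    with open("checkpoint-1000/trainer_state.json") as f:
        state = json.load(f)

    history = state["log_history"]          # one dict per logged step
    first, last = history[0], history[-1]
    print(f"steps logged: {len(history)}")
    print(f"loss: {first['loss']:.4f} -> {last['loss']:.4f}")
    print(f"final learning_rate: {last['learning_rate']}")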