{
  "best_metric": 0.6673134565353394,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.333889816360601,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00333889816360601,
      "grad_norm": 0.6808415651321411,
      "learning_rate": 1.6666666666666668e-07,
      "loss": 0.6329,
      "step": 1
    },
    {
      "epoch": 0.00333889816360601,
      "eval_loss": 1.3715972900390625,
      "eval_runtime": 14.6478,
      "eval_samples_per_second": 34.476,
      "eval_steps_per_second": 4.369,
      "step": 1
    },
    {
      "epoch": 0.00667779632721202,
      "grad_norm": 0.9426732659339905,
      "learning_rate": 3.3333333333333335e-07,
      "loss": 0.8749,
      "step": 2
    },
    {
      "epoch": 0.01001669449081803,
      "grad_norm": 1.0648221969604492,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.8861,
      "step": 3
    },
    {
      "epoch": 0.01335559265442404,
      "grad_norm": 1.0404049158096313,
      "learning_rate": 6.666666666666667e-07,
      "loss": 0.9215,
      "step": 4
    },
    {
      "epoch": 0.01669449081803005,
      "grad_norm": 1.0711796283721924,
      "learning_rate": 8.333333333333333e-07,
      "loss": 0.9232,
      "step": 5
    },
    {
      "epoch": 0.02003338898163606,
      "grad_norm": 1.204472541809082,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.9701,
      "step": 6
    },
    {
      "epoch": 0.02337228714524207,
      "grad_norm": 1.364141583442688,
      "learning_rate": 1.1666666666666668e-06,
      "loss": 1.1403,
      "step": 7
    },
    {
      "epoch": 0.02671118530884808,
      "grad_norm": 1.2284804582595825,
      "learning_rate": 1.3333333333333334e-06,
      "loss": 0.9842,
      "step": 8
    },
    {
      "epoch": 0.03005008347245409,
      "grad_norm": 1.3882466554641724,
      "learning_rate": 1.5e-06,
      "loss": 1.0306,
      "step": 9
    },
    {
      "epoch": 0.0333889816360601,
      "grad_norm": 1.3619149923324585,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 1.1067,
      "step": 10
    },
    {
      "epoch": 0.03672787979966611,
      "grad_norm": 1.3187345266342163,
      "learning_rate": 1.8333333333333333e-06,
      "loss": 1.0109,
      "step": 11
    },
    {
      "epoch": 0.04006677796327212,
      "grad_norm": 1.3394725322723389,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.0854,
      "step": 12
    },
    {
      "epoch": 0.04340567612687813,
      "grad_norm": 1.3412361145019531,
      "learning_rate": 2.166666666666667e-06,
      "loss": 1.0576,
      "step": 13
    },
    {
      "epoch": 0.04674457429048414,
      "grad_norm": 1.3448151350021362,
      "learning_rate": 2.3333333333333336e-06,
      "loss": 1.0436,
      "step": 14
    },
    {
      "epoch": 0.05008347245409015,
      "grad_norm": 1.5222915410995483,
      "learning_rate": 2.5e-06,
      "loss": 1.234,
      "step": 15
    },
    {
      "epoch": 0.05342237061769616,
      "grad_norm": 1.5176973342895508,
      "learning_rate": 2.666666666666667e-06,
      "loss": 1.0968,
      "step": 16
    },
    {
      "epoch": 0.05676126878130217,
      "grad_norm": 1.4612019062042236,
      "learning_rate": 2.8333333333333335e-06,
      "loss": 1.1438,
      "step": 17
    },
    {
      "epoch": 0.06010016694490818,
      "grad_norm": 1.4692972898483276,
      "learning_rate": 3e-06,
      "loss": 1.0425,
      "step": 18
    },
    {
      "epoch": 0.06343906510851419,
      "grad_norm": 1.5923115015029907,
      "learning_rate": 3.1666666666666667e-06,
      "loss": 1.0416,
      "step": 19
    },
    {
      "epoch": 0.0667779632721202,
      "grad_norm": 1.6494970321655273,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.1061,
      "step": 20
    },
    {
      "epoch": 0.07011686143572621,
      "grad_norm": 1.588154673576355,
      "learning_rate": 3.5e-06,
      "loss": 0.9903,
      "step": 21
    },
    {
      "epoch": 0.07345575959933222,
      "grad_norm": 1.5404223203659058,
      "learning_rate": 3.6666666666666666e-06,
      "loss": 1.093,
      "step": 22
    },
    {
      "epoch": 0.07679465776293823,
      "grad_norm": 1.5791702270507812,
      "learning_rate": 3.833333333333334e-06,
      "loss": 1.053,
      "step": 23
    },
    {
      "epoch": 0.08013355592654424,
      "grad_norm": 1.8075627088546753,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.0991,
      "step": 24
    },
    {
      "epoch": 0.08347245409015025,
      "grad_norm": 1.8390108346939087,
      "learning_rate": 4.166666666666667e-06,
      "loss": 0.9902,
      "step": 25
    },
    {
      "epoch": 0.08681135225375626,
      "grad_norm": 1.7681668996810913,
      "learning_rate": 4.333333333333334e-06,
      "loss": 1.0757,
      "step": 26
    },
    {
      "epoch": 0.09015025041736227,
      "grad_norm": 1.6858803033828735,
      "learning_rate": 4.5e-06,
      "loss": 0.9493,
      "step": 27
    },
    {
      "epoch": 0.09348914858096828,
      "grad_norm": 1.818709135055542,
      "learning_rate": 4.666666666666667e-06,
      "loss": 0.9834,
      "step": 28
    },
    {
      "epoch": 0.09682804674457429,
      "grad_norm": 1.8249784708023071,
      "learning_rate": 4.833333333333333e-06,
      "loss": 1.0134,
      "step": 29
    },
    {
      "epoch": 0.1001669449081803,
      "grad_norm": 1.7814394235610962,
      "learning_rate": 5e-06,
      "loss": 0.8181,
      "step": 30
    },
    {
      "epoch": 0.10350584307178631,
      "grad_norm": 2.066978931427002,
      "learning_rate": 4.997482666353287e-06,
      "loss": 0.9081,
      "step": 31
    },
    {
      "epoch": 0.10684474123539232,
      "grad_norm": 1.8057966232299805,
      "learning_rate": 4.989935734988098e-06,
      "loss": 0.8483,
      "step": 32
    },
    {
      "epoch": 0.11018363939899833,
      "grad_norm": 1.9546151161193848,
      "learning_rate": 4.977374404419838e-06,
      "loss": 0.8044,
      "step": 33
    },
    {
      "epoch": 0.11352253756260434,
      "grad_norm": 1.980887770652771,
      "learning_rate": 4.959823971496575e-06,
      "loss": 1.038,
      "step": 34
    },
    {
      "epoch": 0.11686143572621036,
      "grad_norm": 1.793314814567566,
      "learning_rate": 4.937319780454559e-06,
      "loss": 0.8432,
      "step": 35
    },
    {
      "epoch": 0.12020033388981637,
      "grad_norm": 1.9506480693817139,
      "learning_rate": 4.909907151739634e-06,
      "loss": 0.9311,
      "step": 36
    },
    {
      "epoch": 0.12353923205342238,
      "grad_norm": 2.0865514278411865,
      "learning_rate": 4.8776412907378845e-06,
      "loss": 0.8967,
      "step": 37
    },
    {
      "epoch": 0.12687813021702837,
      "grad_norm": 2.0743038654327393,
      "learning_rate": 4.8405871765993435e-06,
      "loss": 0.9611,
      "step": 38
    },
    {
      "epoch": 0.1302170283806344,
      "grad_norm": 1.8424172401428223,
      "learning_rate": 4.7988194313786275e-06,
      "loss": 0.7384,
      "step": 39
    },
    {
      "epoch": 0.1335559265442404,
      "grad_norm": 1.9602130651474,
      "learning_rate": 4.752422169756048e-06,
      "loss": 0.771,
      "step": 40
    },
    {
      "epoch": 0.13689482470784642,
      "grad_norm": 2.0811424255371094,
      "learning_rate": 4.701488829641845e-06,
      "loss": 0.8183,
      "step": 41
    },
    {
      "epoch": 0.14023372287145242,
      "grad_norm": 2.1757004261016846,
      "learning_rate": 4.646121984004666e-06,
      "loss": 0.9766,
      "step": 42
    },
    {
      "epoch": 0.14357262103505844,
      "grad_norm": 2.3917033672332764,
      "learning_rate": 4.586433134303257e-06,
      "loss": 0.7409,
      "step": 43
    },
    {
      "epoch": 0.14691151919866444,
      "grad_norm": 2.366704225540161,
      "learning_rate": 4.522542485937369e-06,
      "loss": 0.9042,
      "step": 44
    },
    {
      "epoch": 0.15025041736227046,
      "grad_norm": 2.359403133392334,
      "learning_rate": 4.454578706170075e-06,
      "loss": 0.898,
      "step": 45
    },
    {
      "epoch": 0.15358931552587646,
      "grad_norm": 3.6760094165802,
      "learning_rate": 4.382678665009028e-06,
      "loss": 0.9776,
      "step": 46
    },
    {
      "epoch": 0.15692821368948248,
      "grad_norm": 5.558593273162842,
      "learning_rate": 4.3069871595684795e-06,
      "loss": 1.5063,
      "step": 47
    },
    {
      "epoch": 0.16026711185308848,
      "grad_norm": 8.863731384277344,
      "learning_rate": 4.227656622467162e-06,
      "loss": 2.3487,
      "step": 48
    },
    {
      "epoch": 0.1636060100166945,
      "grad_norm": 14.891101837158203,
      "learning_rate": 4.144846814849282e-06,
      "loss": 3.9516,
      "step": 49
    },
    {
      "epoch": 0.1669449081803005,
      "grad_norm": 52.261959075927734,
      "learning_rate": 4.058724504646834e-06,
      "loss": 8.3347,
      "step": 50
    },
    {
      "epoch": 0.1669449081803005,
      "eval_loss": 0.8487523794174194,
      "eval_runtime": 14.2649,
      "eval_samples_per_second": 35.402,
      "eval_steps_per_second": 4.487,
      "step": 50
    },
    {
      "epoch": 0.17028380634390652,
      "grad_norm": 2.774435043334961,
      "learning_rate": 3.969463130731183e-06,
      "loss": 0.5446,
      "step": 51
    },
    {
      "epoch": 0.17362270450751252,
      "grad_norm": 3.7148523330688477,
      "learning_rate": 3.8772424536302565e-06,
      "loss": 0.5936,
      "step": 52
    },
    {
      "epoch": 0.17696160267111852,
      "grad_norm": 3.6507351398468018,
      "learning_rate": 3.782248193514766e-06,
      "loss": 0.7174,
      "step": 53
    },
    {
      "epoch": 0.18030050083472454,
      "grad_norm": 3.6278743743896484,
      "learning_rate": 3.684671656182497e-06,
      "loss": 0.6394,
      "step": 54
    },
    {
      "epoch": 0.18363939899833054,
      "grad_norm": 3.270397186279297,
      "learning_rate": 3.5847093477938955e-06,
      "loss": 0.6554,
      "step": 55
    },
    {
      "epoch": 0.18697829716193656,
      "grad_norm": 2.640061140060425,
      "learning_rate": 3.4825625791348093e-06,
      "loss": 0.6313,
      "step": 56
    },
    {
      "epoch": 0.19031719532554256,
      "grad_norm": 2.5344226360321045,
      "learning_rate": 3.3784370602033572e-06,
      "loss": 0.6616,
      "step": 57
    },
    {
      "epoch": 0.19365609348914858,
      "grad_norm": 1.8079237937927246,
      "learning_rate": 3.272542485937369e-06,
      "loss": 0.6266,
      "step": 58
    },
    {
      "epoch": 0.19699499165275458,
      "grad_norm": 1.5996860265731812,
      "learning_rate": 3.165092113916688e-06,
      "loss": 0.6096,
      "step": 59
    },
    {
      "epoch": 0.2003338898163606,
      "grad_norm": 1.3042566776275635,
      "learning_rate": 3.056302334890786e-06,
      "loss": 0.6548,
      "step": 60
    },
    {
      "epoch": 0.2036727879799666,
      "grad_norm": 1.1529649496078491,
      "learning_rate": 2.946392236996592e-06,
      "loss": 0.6394,
      "step": 61
    },
    {
      "epoch": 0.20701168614357263,
      "grad_norm": 1.0117923021316528,
      "learning_rate": 2.835583164544139e-06,
      "loss": 0.6212,
      "step": 62
    },
    {
      "epoch": 0.21035058430717862,
      "grad_norm": 0.8776710033416748,
      "learning_rate": 2.724098272258584e-06,
      "loss": 0.5597,
      "step": 63
    },
    {
      "epoch": 0.21368948247078465,
      "grad_norm": 0.9580402970314026,
      "learning_rate": 2.6121620758762877e-06,
      "loss": 0.6268,
      "step": 64
    },
    {
      "epoch": 0.21702838063439064,
      "grad_norm": 0.9075307250022888,
      "learning_rate": 2.5e-06,
      "loss": 0.5357,
      "step": 65
    },
    {
      "epoch": 0.22036727879799667,
      "grad_norm": 0.9319755434989929,
      "learning_rate": 2.3878379241237136e-06,
      "loss": 0.5899,
      "step": 66
    },
    {
      "epoch": 0.22370617696160267,
      "grad_norm": 0.9590993523597717,
      "learning_rate": 2.2759017277414165e-06,
      "loss": 0.618,
      "step": 67
    },
    {
      "epoch": 0.2270450751252087,
      "grad_norm": 1.0425666570663452,
      "learning_rate": 2.1644168354558623e-06,
      "loss": 0.5349,
      "step": 68
    },
    {
      "epoch": 0.2303839732888147,
      "grad_norm": 0.9576693177223206,
      "learning_rate": 2.053607763003409e-06,
      "loss": 0.5221,
      "step": 69
    },
    {
      "epoch": 0.2337228714524207,
      "grad_norm": 0.8880669474601746,
      "learning_rate": 1.9436976651092143e-06,
      "loss": 0.493,
      "step": 70
    },
    {
      "epoch": 0.2370617696160267,
      "grad_norm": 1.1609593629837036,
      "learning_rate": 1.8349078860833125e-06,
      "loss": 0.62,
      "step": 71
    },
    {
      "epoch": 0.24040066777963273,
      "grad_norm": 1.0580207109451294,
      "learning_rate": 1.7274575140626318e-06,
      "loss": 0.5414,
      "step": 72
    },
    {
      "epoch": 0.24373956594323873,
      "grad_norm": 0.9047161340713501,
      "learning_rate": 1.6215629397966432e-06,
      "loss": 0.4679,
      "step": 73
    },
    {
      "epoch": 0.24707846410684475,
      "grad_norm": 1.0645842552185059,
      "learning_rate": 1.5174374208651913e-06,
      "loss": 0.5592,
      "step": 74
    },
    {
      "epoch": 0.25041736227045075,
      "grad_norm": 1.0351444482803345,
      "learning_rate": 1.415290652206105e-06,
      "loss": 0.4938,
      "step": 75
    },
    {
      "epoch": 0.25375626043405675,
      "grad_norm": 1.0303648710250854,
      "learning_rate": 1.3153283438175036e-06,
      "loss": 0.5191,
      "step": 76
    },
    {
      "epoch": 0.2570951585976628,
      "grad_norm": 1.0689729452133179,
      "learning_rate": 1.217751806485235e-06,
      "loss": 0.5241,
      "step": 77
    },
    {
      "epoch": 0.2604340567612688,
      "grad_norm": 1.0949710607528687,
      "learning_rate": 1.122757546369744e-06,
      "loss": 0.5091,
      "step": 78
    },
    {
      "epoch": 0.2637729549248748,
      "grad_norm": 1.066139578819275,
      "learning_rate": 1.0305368692688175e-06,
      "loss": 0.5245,
      "step": 79
    },
    {
      "epoch": 0.2671118530884808,
      "grad_norm": 1.0166308879852295,
      "learning_rate": 9.412754953531664e-07,
      "loss": 0.4759,
      "step": 80
    },
    {
      "epoch": 0.2704507512520868,
      "grad_norm": 1.113553524017334,
      "learning_rate": 8.551531851507186e-07,
      "loss": 0.4975,
      "step": 81
    },
    {
      "epoch": 0.27378964941569284,
      "grad_norm": 1.1330246925354004,
      "learning_rate": 7.723433775328385e-07,
      "loss": 0.5459,
      "step": 82
    },
    {
      "epoch": 0.27712854757929883,
      "grad_norm": 1.0714497566223145,
      "learning_rate": 6.930128404315214e-07,
      "loss": 0.4841,
      "step": 83
    },
    {
      "epoch": 0.28046744574290483,
      "grad_norm": 1.0576684474945068,
      "learning_rate": 6.17321334990973e-07,
      "loss": 0.4325,
      "step": 84
    },
    {
      "epoch": 0.2838063439065108,
      "grad_norm": 1.2464218139648438,
      "learning_rate": 5.454212938299256e-07,
      "loss": 0.5759,
      "step": 85
    },
    {
      "epoch": 0.2871452420701169,
      "grad_norm": 1.2016359567642212,
      "learning_rate": 4.774575140626317e-07,
      "loss": 0.4957,
      "step": 86
    },
    {
      "epoch": 0.2904841402337229,
      "grad_norm": 1.2413355112075806,
      "learning_rate": 4.1356686569674344e-07,
      "loss": 0.5146,
      "step": 87
    },
    {
      "epoch": 0.2938230383973289,
      "grad_norm": 1.3268682956695557,
      "learning_rate": 3.538780159953348e-07,
      "loss": 0.6438,
      "step": 88
    },
    {
      "epoch": 0.29716193656093487,
      "grad_norm": 1.2803572416305542,
      "learning_rate": 2.98511170358155e-07,
      "loss": 0.5471,
      "step": 89
    },
    {
      "epoch": 0.3005008347245409,
      "grad_norm": 1.4874416589736938,
      "learning_rate": 2.4757783024395244e-07,
      "loss": 0.548,
      "step": 90
    },
    {
      "epoch": 0.3038397328881469,
      "grad_norm": 1.5271040201187134,
      "learning_rate": 2.0118056862137358e-07,
      "loss": 0.5197,
      "step": 91
    },
    {
      "epoch": 0.3071786310517529,
      "grad_norm": 1.6254510879516602,
      "learning_rate": 1.59412823400657e-07,
      "loss": 0.6227,
      "step": 92
    },
    {
      "epoch": 0.3105175292153589,
      "grad_norm": 1.617385983467102,
      "learning_rate": 1.223587092621162e-07,
      "loss": 0.5972,
      "step": 93
    },
    {
      "epoch": 0.31385642737896496,
      "grad_norm": 1.9136462211608887,
      "learning_rate": 9.00928482603669e-08,
      "loss": 0.7396,
      "step": 94
    },
    {
      "epoch": 0.31719532554257096,
      "grad_norm": 2.216195821762085,
      "learning_rate": 6.268021954544095e-08,
      "loss": 0.6338,
      "step": 95
    },
    {
      "epoch": 0.32053422370617696,
      "grad_norm": 2.8479862213134766,
      "learning_rate": 4.017602850342584e-08,
      "loss": 0.725,
      "step": 96
    },
    {
      "epoch": 0.32387312186978295,
      "grad_norm": 4.363030910491943,
      "learning_rate": 2.262559558016325e-08,
      "loss": 1.1272,
      "step": 97
    },
    {
      "epoch": 0.327212020033389,
      "grad_norm": 8.434501647949219,
      "learning_rate": 1.006426501190233e-08,
      "loss": 1.9515,
      "step": 98
    },
    {
      "epoch": 0.330550918196995,
      "grad_norm": 16.16083335876465,
      "learning_rate": 2.5173336467135266e-09,
      "loss": 3.21,
      "step": 99
    },
    {
      "epoch": 0.333889816360601,
      "grad_norm": 50.90562057495117,
      "learning_rate": 0.0,
      "loss": 7.3095,
      "step": 100
    },
    {
      "epoch": 0.333889816360601,
      "eval_loss": 0.6673134565353394,
      "eval_runtime": 14.2635,
      "eval_samples_per_second": 35.405,
      "eval_steps_per_second": 4.487,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.728250925121536e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
|