{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.99949609473419,
  "eval_steps": 500,
  "global_step": 683,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.016082826556767352,
      "grad_norm": 65.62212371826172,
      "learning_rate": 7.1428571428571436e-06,
      "loss": 0.8077,
      "step": 10
    },
    {
      "epoch": 0.032165653113534705,
      "grad_norm": 50.16935729980469,
      "learning_rate": 1.4285714285714287e-05,
      "loss": 0.6155,
      "step": 20
    },
    {
      "epoch": 0.048248479670302054,
      "grad_norm": 32.083778381347656,
      "learning_rate": 2.1428571428571428e-05,
      "loss": 0.5188,
      "step": 30
    },
    {
      "epoch": 0.06433130622706941,
      "grad_norm": 27.791507720947266,
      "learning_rate": 2.8571428571428574e-05,
      "loss": 0.4476,
      "step": 40
    },
    {
      "epoch": 0.08041413278383676,
      "grad_norm": 33.207706451416016,
      "learning_rate": 3.571428571428572e-05,
      "loss": 0.4035,
      "step": 50
    },
    {
      "epoch": 0.09649695934060411,
      "grad_norm": 29.39824104309082,
      "learning_rate": 3.99995163828731e-05,
      "loss": 0.373,
      "step": 60
    },
    {
      "epoch": 0.11257978589737146,
      "grad_norm": 27.801868438720703,
      "learning_rate": 3.999407595879474e-05,
      "loss": 0.33,
      "step": 70
    },
    {
      "epoch": 0.12866261245413882,
      "grad_norm": 27.443641662597656,
      "learning_rate": 3.998259223910273e-05,
      "loss": 0.3121,
      "step": 80
    },
    {
      "epoch": 0.14474543901090617,
      "grad_norm": 27.026424407958984,
      "learning_rate": 3.9965068694800805e-05,
      "loss": 0.2868,
      "step": 90
    },
    {
      "epoch": 0.16082826556767352,
      "grad_norm": 27.81942367553711,
      "learning_rate": 3.9941510622455766e-05,
      "loss": 0.2675,
      "step": 100
    },
    {
      "epoch": 0.17691109212444087,
      "grad_norm": 27.47068214416504,
      "learning_rate": 3.9911925142596474e-05,
      "loss": 0.252,
      "step": 110
    },
    {
      "epoch": 0.19299391868120822,
      "grad_norm": 27.166418075561523,
      "learning_rate": 3.9876321197561714e-05,
      "loss": 0.233,
      "step": 120
    },
    {
      "epoch": 0.20907674523797556,
      "grad_norm": 26.941362380981445,
      "learning_rate": 3.983470954879728e-05,
      "loss": 0.2216,
      "step": 130
    },
    {
      "epoch": 0.2251595717947429,
      "grad_norm": 25.84792709350586,
      "learning_rate": 3.9787102773603325e-05,
      "loss": 0.2057,
      "step": 140
    },
    {
      "epoch": 0.2412423983515103,
      "grad_norm": 24.429794311523438,
      "learning_rate": 3.973351526133279e-05,
      "loss": 0.1997,
      "step": 150
    },
    {
      "epoch": 0.25732522490827764,
      "grad_norm": 25.138715744018555,
      "learning_rate": 3.967396320904215e-05,
      "loss": 0.1799,
      "step": 160
    },
    {
      "epoch": 0.273408051465045,
      "grad_norm": 23.31828498840332,
      "learning_rate": 3.960846461659586e-05,
      "loss": 0.1778,
      "step": 170
    },
    {
      "epoch": 0.28949087802181234,
      "grad_norm": 27.868120193481445,
      "learning_rate": 3.953703928122569e-05,
      "loss": 0.17,
      "step": 180
    },
    {
      "epoch": 1.018596079745351,
      "grad_norm": 23.8084659576416,
      "learning_rate": 3.945970879154704e-05,
      "loss": 0.1812,
      "step": 190
    },
    {
      "epoch": 1.0722063997319484,
      "grad_norm": 28.152708053588867,
      "learning_rate": 3.9376496521033636e-05,
      "loss": 0.1811,
      "step": 200
    },
    {
      "epoch": 1.1258167197185458,
      "grad_norm": 25.456472396850586,
      "learning_rate": 3.92874276209528e-05,
      "loss": 0.1703,
      "step": 210
    },
    {
      "epoch": 1.1794270397051432,
      "grad_norm": 24.26827049255371,
      "learning_rate": 3.919252901276339e-05,
      "loss": 0.1772,
      "step": 220
    },
    {
      "epoch": 1.2330373596917408,
      "grad_norm": 23.77770233154297,
      "learning_rate": 3.909182937997866e-05,
      "loss": 0.1654,
      "step": 230
    },
    {
      "epoch": 1.2866476796783382,
      "grad_norm": 24.42105484008789,
      "learning_rate": 3.8985359159496516e-05,
      "loss": 0.1504,
      "step": 240
    },
    {
      "epoch": 1.3402579996649355,
      "grad_norm": 24.336668014526367,
      "learning_rate": 3.8873150532399885e-05,
      "loss": 0.1368,
      "step": 250
    },
    {
      "epoch": 1.393868319651533,
      "grad_norm": 24.254329681396484,
      "learning_rate": 3.8755237414229834e-05,
      "loss": 0.1454,
      "step": 260
    },
    {
      "epoch": 1.4474786396381303,
      "grad_norm": 25.233129501342773,
      "learning_rate": 3.8631655444734414e-05,
      "loss": 0.1373,
      "step": 270
    },
    {
      "epoch": 1.5010889596247279,
      "grad_norm": 23.18859100341797,
      "learning_rate": 3.850244197709641e-05,
      "loss": 0.1341,
      "step": 280
    },
    {
      "epoch": 1.554699279611325,
      "grad_norm": 24.764631271362305,
      "learning_rate": 3.8367636066643224e-05,
      "loss": 0.139,
      "step": 290
    },
    {
      "epoch": 1.6083095995979226,
      "grad_norm": 23.146198272705078,
      "learning_rate": 3.822727845904216e-05,
      "loss": 0.1256,
      "step": 300
    },
    {
      "epoch": 1.66191991958452,
      "grad_norm": 23.295623779296875,
      "learning_rate": 3.808141157798492e-05,
      "loss": 0.1206,
      "step": 310
    },
    {
      "epoch": 1.7155302395711174,
      "grad_norm": 21.969825744628906,
      "learning_rate": 3.793007951236485e-05,
      "loss": 0.12,
      "step": 320
    },
    {
      "epoch": 1.769140559557715,
      "grad_norm": 21.317886352539062,
      "learning_rate": 3.777332800295088e-05,
      "loss": 0.1116,
      "step": 330
    },
    {
      "epoch": 1.8227508795443121,
      "grad_norm": 23.39339256286621,
      "learning_rate": 3.7611204428562175e-05,
      "loss": 0.1129,
      "step": 340
    },
    {
      "epoch": 1.8763611995309097,
      "grad_norm": 27.18583869934082,
      "learning_rate": 3.74437577917477e-05,
      "loss": 0.109,
      "step": 350
    },
    {
      "epoch": 1.9299715195175071,
      "grad_norm": 21.890743255615234,
      "learning_rate": 3.727103870397495e-05,
      "loss": 0.1021,
      "step": 360
    },
    {
      "epoch": 1.9835818395041045,
      "grad_norm": 23.11468505859375,
      "learning_rate": 3.709309937033247e-05,
      "loss": 0.1003,
      "step": 370
    },
    {
      "epoch": 2.022323003275384,
      "grad_norm": 25.56001853942871,
      "learning_rate": 3.6909993573750586e-05,
      "loss": 0.1186,
      "step": 380
    },
    {
      "epoch": 2.054572940287226,
      "grad_norm": 23.89845085144043,
      "learning_rate": 3.6721776658745335e-05,
      "loss": 0.1298,
      "step": 390
    },
    {
      "epoch": 2.086822877299068,
      "grad_norm": 24.464975357055664,
      "learning_rate": 3.652850551469031e-05,
      "loss": 0.1265,
      "step": 400
    },
    {
      "epoch": 2.1190728143109094,
      "grad_norm": 23.46689796447754,
      "learning_rate": 3.63302385586216e-05,
      "loss": 0.1248,
      "step": 410
    },
    {
      "epoch": 2.1513227513227515,
      "grad_norm": 21.452667236328125,
      "learning_rate": 3.6127035717581035e-05,
      "loss": 0.1128,
      "step": 420
    },
    {
      "epoch": 2.183572688334593,
      "grad_norm": 22.750587463378906,
      "learning_rate": 3.59189584105029e-05,
      "loss": 0.1068,
      "step": 430
    },
    {
      "epoch": 2.215822625346435,
      "grad_norm": 23.539155960083008,
      "learning_rate": 3.570606952964991e-05,
      "loss": 0.1089,
      "step": 440
    },
    {
      "epoch": 2.2480725623582765,
      "grad_norm": 22.286226272583008,
      "learning_rate": 3.54884334216036e-05,
      "loss": 0.1012,
      "step": 450
    },
    {
      "epoch": 2.2803224993701185,
      "grad_norm": 25.91316795349121,
      "learning_rate": 3.526611586781547e-05,
      "loss": 0.1085,
      "step": 460
    },
    {
      "epoch": 2.31257243638196,
      "grad_norm": 25.620323181152344,
      "learning_rate": 3.503918406472411e-05,
      "loss": 0.0935,
      "step": 470
    },
    {
      "epoch": 2.344822373393802,
      "grad_norm": 20.377058029174805,
      "learning_rate": 3.480770660344486e-05,
      "loss": 0.0982,
      "step": 480
    },
    {
      "epoch": 2.377072310405644,
      "grad_norm": 20.553796768188477,
      "learning_rate": 3.457175344903783e-05,
      "loss": 0.084,
      "step": 490
    },
    {
      "epoch": 2.4093222474174856,
      "grad_norm": 20.02320671081543,
      "learning_rate": 3.433139591936068e-05,
      "loss": 0.0964,
      "step": 500
    },
    {
      "epoch": 2.441572184429327,
      "grad_norm": 21.293182373046875,
      "learning_rate": 3.4086706663512454e-05,
      "loss": 0.0835,
      "step": 510
    },
    {
      "epoch": 2.4738221214411693,
      "grad_norm": 19.96319580078125,
      "learning_rate": 3.3837759639875113e-05,
      "loss": 0.0785,
      "step": 520
    },
    {
      "epoch": 2.506072058453011,
      "grad_norm": 22.20425796508789,
      "learning_rate": 3.358463009375927e-05,
      "loss": 0.0779,
      "step": 530
    },
    {
      "epoch": 2.5383219954648526,
      "grad_norm": 18.57112693786621,
      "learning_rate": 3.332739453466098e-05,
      "loss": 0.0814,
      "step": 540
    },
    {
      "epoch": 2.5705719324766942,
      "grad_norm": 20.270761489868164,
      "learning_rate": 3.306613071313641e-05,
      "loss": 0.0808,
      "step": 550
    },
    {
      "epoch": 2.602821869488536,
      "grad_norm": 19.787559509277344,
      "learning_rate": 3.280091759730144e-05,
      "loss": 0.0743,
      "step": 560
    },
    {
      "epoch": 2.635071806500378,
      "grad_norm": 18.70611572265625,
      "learning_rate": 3.25318353489631e-05,
      "loss": 0.0757,
      "step": 570
    },
    {
      "epoch": 2.6673217435122196,
      "grad_norm": 20.557205200195312,
      "learning_rate": 3.2258965299390465e-05,
      "loss": 0.0692,
      "step": 580
    },
    {
      "epoch": 2.6995716805240617,
      "grad_norm": 20.602092742919922,
      "learning_rate": 3.1982389924731805e-05,
      "loss": 0.0699,
      "step": 590
    },
    {
      "epoch": 2.7318216175359034,
      "grad_norm": 22.87337303161621,
      "learning_rate": 3.17021928210859e-05,
      "loss": 0.0662,
      "step": 600
    },
    {
      "epoch": 2.764071554547745,
      "grad_norm": 22.488433837890625,
      "learning_rate": 3.141845867923469e-05,
      "loss": 0.0799,
      "step": 610
    },
    {
      "epoch": 2.7963214915595866,
      "grad_norm": 20.05706787109375,
      "learning_rate": 3.113127325904519e-05,
      "loss": 0.0665,
      "step": 620
    },
    {
      "epoch": 2.8285714285714287,
      "grad_norm": 18.822357177734375,
      "learning_rate": 3.084072336354812e-05,
      "loss": 0.0668,
      "step": 630
    },
    {
      "epoch": 2.8608213655832704,
      "grad_norm": 17.79743194580078,
      "learning_rate": 3.05468968127014e-05,
      "loss": 0.0604,
      "step": 640
    },
    {
      "epoch": 2.893071302595112,
      "grad_norm": 19.10596466064453,
      "learning_rate": 3.0249882416846117e-05,
      "loss": 0.059,
      "step": 650
    },
    {
      "epoch": 2.925321239606954,
      "grad_norm": 20.25804328918457,
      "learning_rate": 2.9949769949863266e-05,
      "loss": 0.0646,
      "step": 660
    },
    {
      "epoch": 2.9575711766187958,
      "grad_norm": 19.09357452392578,
      "learning_rate": 2.96466501220392e-05,
      "loss": 0.0569,
      "step": 670
    },
    {
      "epoch": 2.9898211136306374,
      "grad_norm": 15.293481826782227,
      "learning_rate": 2.934061455264802e-05,
      "loss": 0.0574,
      "step": 680
    },
    {
      "epoch": 2.99949609473419,
      "step": 683,
      "total_flos": 6.564245582272266e+17,
      "train_loss": 0.1658007610769258,
      "train_runtime": 25935.882,
      "train_samples_per_second": 9.206,
      "train_steps_per_second": 0.072
    }
  ],
  "logging_steps": 10,
  "max_steps": 1863,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 6.564245582272266e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}