{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 81,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.012345679012345678,
      "grad_norm": 3.7708919048309326,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 2.2074,
      "step": 1
    },
    {
      "epoch": 0.024691358024691357,
      "grad_norm": 4.930561065673828,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 2.439,
      "step": 2
    },
    {
      "epoch": 0.037037037037037035,
      "grad_norm": 4.928946495056152,
      "learning_rate": 3e-06,
      "loss": 2.2376,
      "step": 3
    },
    {
      "epoch": 0.04938271604938271,
      "grad_norm": 5.485366344451904,
      "learning_rate": 4.000000000000001e-06,
      "loss": 2.3633,
      "step": 4
    },
    {
      "epoch": 0.06172839506172839,
      "grad_norm": 6.013079643249512,
      "learning_rate": 5e-06,
      "loss": 2.5097,
      "step": 5
    },
    {
      "epoch": 0.07407407407407407,
      "grad_norm": 6.560212135314941,
      "learning_rate": 6e-06,
      "loss": 2.548,
      "step": 6
    },
    {
      "epoch": 0.08641975308641975,
      "grad_norm": 6.793174743652344,
      "learning_rate": 7.000000000000001e-06,
      "loss": 2.6148,
      "step": 7
    },
    {
      "epoch": 0.09876543209876543,
      "grad_norm": 7.56729793548584,
      "learning_rate": 8.000000000000001e-06,
      "loss": 2.5419,
      "step": 8
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 7.1028265953063965,
      "learning_rate": 9e-06,
      "loss": 2.4437,
      "step": 9
    },
    {
      "epoch": 0.12345679012345678,
      "grad_norm": 8.72243881225586,
      "learning_rate": 1e-05,
      "loss": 2.6641,
      "step": 10
    },
    {
      "epoch": 0.13580246913580246,
      "grad_norm": 8.16983413696289,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 2.6787,
      "step": 11
    },
    {
      "epoch": 0.14814814814814814,
      "grad_norm": 9.106399536132812,
      "learning_rate": 1.2e-05,
      "loss": 2.8477,
      "step": 12
    },
    {
      "epoch": 0.16049382716049382,
      "grad_norm": 10.121397972106934,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 2.9649,
      "step": 13
    },
    {
      "epoch": 0.1728395061728395,
      "grad_norm": 10.397982597351074,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 3.0027,
      "step": 14
    },
    {
      "epoch": 0.18518518518518517,
      "grad_norm": 10.967755317687988,
      "learning_rate": 1.5e-05,
      "loss": 3.0125,
      "step": 15
    },
    {
      "epoch": 0.19753086419753085,
      "grad_norm": 11.275672912597656,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 3.1969,
      "step": 16
    },
    {
      "epoch": 0.20987654320987653,
      "grad_norm": 13.128063201904297,
      "learning_rate": 1.7000000000000003e-05,
      "loss": 3.1763,
      "step": 17
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 16.17818260192871,
      "learning_rate": 1.8e-05,
      "loss": 3.3604,
      "step": 18
    },
    {
      "epoch": 0.2345679012345679,
      "grad_norm": 18.02959442138672,
      "learning_rate": 1.9e-05,
      "loss": 3.3451,
      "step": 19
    },
    {
      "epoch": 0.24691358024691357,
      "grad_norm": 23.64628791809082,
      "learning_rate": 2e-05,
      "loss": 3.9989,
      "step": 20
    },
    {
      "epoch": 0.25925925925925924,
      "grad_norm": 3.101980209350586,
      "learning_rate": 2.1e-05,
      "loss": 2.1226,
      "step": 21
    },
    {
      "epoch": 0.2716049382716049,
      "grad_norm": 3.7152328491210938,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 2.2547,
      "step": 22
    },
    {
      "epoch": 0.2839506172839506,
      "grad_norm": 3.7375121116638184,
      "learning_rate": 2.3000000000000003e-05,
      "loss": 2.0953,
      "step": 23
    },
    {
      "epoch": 0.2962962962962963,
      "grad_norm": 3.882807493209839,
      "learning_rate": 2.4e-05,
      "loss": 1.9422,
      "step": 24
    },
    {
      "epoch": 0.30864197530864196,
      "grad_norm": 4.328245639801025,
      "learning_rate": 2.5e-05,
      "loss": 1.9509,
      "step": 25
    },
    {
      "epoch": 0.32098765432098764,
      "grad_norm": 4.226940155029297,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 2.0598,
      "step": 26
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 4.362301826477051,
      "learning_rate": 2.7000000000000002e-05,
      "loss": 2.0143,
      "step": 27
    },
    {
      "epoch": 0.345679012345679,
      "grad_norm": 4.777379035949707,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 2.0038,
      "step": 28
    },
    {
      "epoch": 0.35802469135802467,
      "grad_norm": 5.263576030731201,
      "learning_rate": 2.9e-05,
      "loss": 1.983,
      "step": 29
    },
    {
      "epoch": 0.37037037037037035,
      "grad_norm": 5.005555152893066,
      "learning_rate": 3e-05,
      "loss": 1.9798,
      "step": 30
    },
    {
      "epoch": 0.38271604938271603,
      "grad_norm": 5.490865707397461,
      "learning_rate": 3.1e-05,
      "loss": 2.0266,
      "step": 31
    },
    {
      "epoch": 0.3950617283950617,
      "grad_norm": 5.963639736175537,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 1.8442,
      "step": 32
    },
    {
      "epoch": 0.4074074074074074,
      "grad_norm": 5.804817199707031,
      "learning_rate": 3.3e-05,
      "loss": 1.8383,
      "step": 33
    },
    {
      "epoch": 0.41975308641975306,
      "grad_norm": 6.9888763427734375,
      "learning_rate": 3.4000000000000007e-05,
      "loss": 1.9665,
      "step": 34
    },
    {
      "epoch": 0.43209876543209874,
      "grad_norm": 6.524566650390625,
      "learning_rate": 3.5e-05,
      "loss": 1.622,
      "step": 35
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 6.752888202667236,
      "learning_rate": 3.6e-05,
      "loss": 1.9759,
      "step": 36
    },
    {
      "epoch": 0.4567901234567901,
      "grad_norm": 7.297061443328857,
      "learning_rate": 3.7e-05,
      "loss": 1.6826,
      "step": 37
    },
    {
      "epoch": 0.4691358024691358,
      "grad_norm": 8.368556022644043,
      "learning_rate": 3.8e-05,
      "loss": 1.7017,
      "step": 38
    },
    {
      "epoch": 0.48148148148148145,
      "grad_norm": 8.047032356262207,
      "learning_rate": 3.9000000000000006e-05,
      "loss": 1.4993,
      "step": 39
    },
    {
      "epoch": 0.49382716049382713,
      "grad_norm": 14.948863983154297,
      "learning_rate": 4e-05,
      "loss": 1.5853,
      "step": 40
    },
    {
      "epoch": 0.5061728395061729,
      "grad_norm": 4.967220783233643,
      "learning_rate": 4.1e-05,
      "loss": 1.9802,
      "step": 41
    },
    {
      "epoch": 0.5185185185185185,
      "grad_norm": 5.796135902404785,
      "learning_rate": 4.2e-05,
      "loss": 1.9479,
      "step": 42
    },
    {
      "epoch": 0.5308641975308642,
      "grad_norm": 4.888399600982666,
      "learning_rate": 4.3e-05,
      "loss": 2.0503,
      "step": 43
    },
    {
      "epoch": 0.5432098765432098,
      "grad_norm": 3.456890106201172,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 1.8608,
      "step": 44
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 3.64420747756958,
      "learning_rate": 4.5e-05,
      "loss": 1.7385,
      "step": 45
    },
    {
      "epoch": 0.5679012345679012,
      "grad_norm": 3.9788224697113037,
      "learning_rate": 4.600000000000001e-05,
      "loss": 1.8041,
      "step": 46
    },
    {
      "epoch": 0.5802469135802469,
      "grad_norm": 4.261048793792725,
      "learning_rate": 4.7e-05,
      "loss": 1.8889,
      "step": 47
    },
    {
      "epoch": 0.5925925925925926,
      "grad_norm": 3.8799641132354736,
      "learning_rate": 4.8e-05,
      "loss": 1.7051,
      "step": 48
    },
    {
      "epoch": 0.6049382716049383,
      "grad_norm": 4.188144683837891,
      "learning_rate": 4.9e-05,
      "loss": 1.7398,
      "step": 49
    },
    {
      "epoch": 0.6172839506172839,
      "grad_norm": 4.114570617675781,
      "learning_rate": 5e-05,
      "loss": 1.8819,
      "step": 50
    },
    {
      "epoch": 0.6296296296296297,
      "grad_norm": 4.061178207397461,
      "learning_rate": 5.1000000000000006e-05,
      "loss": 1.5768,
      "step": 51
    },
    {
      "epoch": 0.6419753086419753,
      "grad_norm": 4.49856424331665,
      "learning_rate": 5.2000000000000004e-05,
      "loss": 1.8716,
      "step": 52
    },
    {
      "epoch": 0.654320987654321,
      "grad_norm": 4.215878963470459,
      "learning_rate": 5.300000000000001e-05,
      "loss": 1.6363,
      "step": 53
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 4.765596389770508,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 1.6679,
      "step": 54
    },
    {
      "epoch": 0.6790123456790124,
      "grad_norm": 5.05023193359375,
      "learning_rate": 5.500000000000001e-05,
      "loss": 1.7256,
      "step": 55
    },
    {
      "epoch": 0.691358024691358,
      "grad_norm": 5.904537200927734,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 1.7554,
      "step": 56
    },
    {
      "epoch": 0.7037037037037037,
      "grad_norm": 5.709670543670654,
      "learning_rate": 5.6999999999999996e-05,
      "loss": 1.5552,
      "step": 57
    },
    {
      "epoch": 0.7160493827160493,
      "grad_norm": 6.057749271392822,
      "learning_rate": 5.8e-05,
      "loss": 1.5226,
      "step": 58
    },
    {
      "epoch": 0.7283950617283951,
      "grad_norm": 7.13749361038208,
      "learning_rate": 5.9e-05,
      "loss": 1.6127,
      "step": 59
    },
    {
      "epoch": 0.7407407407407407,
      "grad_norm": 6.580483436584473,
      "learning_rate": 6e-05,
      "loss": 1.0641,
      "step": 60
    },
    {
      "epoch": 0.7530864197530864,
      "grad_norm": 4.535214900970459,
      "learning_rate": 6.1e-05,
      "loss": 1.7883,
      "step": 61
    },
    {
      "epoch": 0.7654320987654321,
      "grad_norm": 3.5383188724517822,
      "learning_rate": 6.2e-05,
      "loss": 1.8828,
      "step": 62
    },
    {
      "epoch": 0.7777777777777778,
      "grad_norm": 4.3437418937683105,
      "learning_rate": 6.3e-05,
      "loss": 1.9061,
      "step": 63
    },
    {
      "epoch": 0.7901234567901234,
      "grad_norm": 3.5287466049194336,
      "learning_rate": 6.400000000000001e-05,
      "loss": 1.7427,
      "step": 64
    },
    {
      "epoch": 0.8024691358024691,
      "grad_norm": 3.3431613445281982,
      "learning_rate": 6.500000000000001e-05,
      "loss": 1.6704,
      "step": 65
    },
    {
      "epoch": 0.8148148148148148,
      "grad_norm": 3.0107169151306152,
      "learning_rate": 6.6e-05,
      "loss": 1.8381,
      "step": 66
    },
    {
      "epoch": 0.8271604938271605,
      "grad_norm": 3.2886972427368164,
      "learning_rate": 6.7e-05,
      "loss": 1.6069,
      "step": 67
    },
    {
      "epoch": 0.8395061728395061,
      "grad_norm": 3.437831401824951,
      "learning_rate": 6.800000000000001e-05,
      "loss": 1.5228,
      "step": 68
    },
    {
      "epoch": 0.8518518518518519,
      "grad_norm": 3.818103075027466,
      "learning_rate": 6.9e-05,
      "loss": 1.6951,
      "step": 69
    },
    {
      "epoch": 0.8641975308641975,
      "grad_norm": 3.975029945373535,
      "learning_rate": 7e-05,
      "loss": 1.8103,
      "step": 70
    },
    {
      "epoch": 0.8765432098765432,
      "grad_norm": 4.448912620544434,
      "learning_rate": 7.1e-05,
      "loss": 1.8292,
      "step": 71
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 4.634730815887451,
      "learning_rate": 7.2e-05,
      "loss": 1.8566,
      "step": 72
    },
    {
      "epoch": 0.9012345679012346,
      "grad_norm": 4.999331474304199,
      "learning_rate": 7.3e-05,
      "loss": 1.8153,
      "step": 73
    },
    {
      "epoch": 0.9135802469135802,
      "grad_norm": 5.28339147567749,
      "learning_rate": 7.4e-05,
      "loss": 1.8426,
      "step": 74
    },
    {
      "epoch": 0.9259259259259259,
      "grad_norm": 5.228104591369629,
      "learning_rate": 7.500000000000001e-05,
      "loss": 1.7594,
      "step": 75
    },
    {
      "epoch": 0.9382716049382716,
      "grad_norm": 4.795072078704834,
      "learning_rate": 7.6e-05,
      "loss": 1.6741,
      "step": 76
    },
    {
      "epoch": 0.9506172839506173,
      "grad_norm": 5.2640252113342285,
      "learning_rate": 7.7e-05,
      "loss": 1.7086,
      "step": 77
    },
    {
      "epoch": 0.9629629629629629,
      "grad_norm": 5.912917613983154,
      "learning_rate": 7.800000000000001e-05,
      "loss": 1.486,
      "step": 78
    },
    {
      "epoch": 0.9753086419753086,
      "grad_norm": 6.387777805328369,
      "learning_rate": 7.900000000000001e-05,
      "loss": 1.7058,
      "step": 79
    },
    {
      "epoch": 0.9876543209876543,
      "grad_norm": 8.678359985351562,
      "learning_rate": 8e-05,
      "loss": 1.2198,
      "step": 80
    },
    {
      "epoch": 1.0,
      "grad_norm": 4.613696098327637,
      "learning_rate": 8.1e-05,
      "loss": 1.5046,
      "step": 81
    }
  ],
  "logging_steps": 1,
  "max_steps": 81,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 239,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.2379732108509184e+16,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}