{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.987613542526837,
  "eval_steps": 500,
  "global_step": 1510,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
"log_history": [ |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 6.482018947601318, |
|
"learning_rate": 4.768211920529802e-06, |
|
"loss": 5.8299, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 4.826422691345215, |
|
"learning_rate": 9.735099337748345e-06, |
|
"loss": 5.1849, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 5.315689563751221, |
|
"learning_rate": 1.4701986754966887e-05, |
|
"loss": 4.4164, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 2.3576250076293945, |
|
"learning_rate": 1.9668874172185433e-05, |
|
"loss": 3.5949, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.602609634399414, |
|
"learning_rate": 2.4635761589403972e-05, |
|
"loss": 2.9948, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.4007989168167114, |
|
"learning_rate": 2.9602649006622518e-05, |
|
"loss": 2.6452, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 2.107353687286377, |
|
"learning_rate": 2.9492273730684327e-05, |
|
"loss": 2.4714, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.3134864568710327, |
|
"learning_rate": 2.894039735099338e-05, |
|
"loss": 2.3634, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.580680251121521, |
|
"learning_rate": 2.838852097130243e-05, |
|
"loss": 2.3032, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 1.3618550300598145, |
|
"learning_rate": 2.783664459161148e-05, |
|
"loss": 2.2501, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 1.531614899635315, |
|
"learning_rate": 2.728476821192053e-05, |
|
"loss": 2.2265, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 1.4849926233291626, |
|
"learning_rate": 2.673289183222958e-05, |
|
"loss": 2.1844, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 1.6642751693725586, |
|
"learning_rate": 2.6181015452538634e-05, |
|
"loss": 2.1166, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 1.7284085750579834, |
|
"learning_rate": 2.5629139072847685e-05, |
|
"loss": 2.1233, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 1.6671867370605469, |
|
"learning_rate": 2.5077262693156735e-05, |
|
"loss": 2.1212, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 1.756274938583374, |
|
"learning_rate": 2.4525386313465785e-05, |
|
"loss": 2.0726, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 1.8096344470977783, |
|
"learning_rate": 2.3973509933774835e-05, |
|
"loss": 2.0875, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 1.7899348735809326, |
|
"learning_rate": 2.3421633554083885e-05, |
|
"loss": 2.0833, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 1.917384386062622, |
|
"learning_rate": 2.286975717439294e-05, |
|
"loss": 2.0743, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 1.8895052671432495, |
|
"learning_rate": 2.231788079470199e-05, |
|
"loss": 2.0609, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 1.9963146448135376, |
|
"learning_rate": 2.176600441501104e-05, |
|
"loss": 2.0656, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 1.9002119302749634, |
|
"learning_rate": 2.121412803532009e-05, |
|
"loss": 2.0541, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 1.874874472618103, |
|
"learning_rate": 2.066225165562914e-05, |
|
"loss": 2.0416, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 1.7960671186447144, |
|
"learning_rate": 2.011037527593819e-05, |
|
"loss": 2.0311, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 2.1183598041534424, |
|
"learning_rate": 1.9558498896247242e-05, |
|
"loss": 2.0009, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 1.9861488342285156, |
|
"learning_rate": 1.9006622516556292e-05, |
|
"loss": 1.9688, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 2.235743284225464, |
|
"learning_rate": 1.8454746136865343e-05, |
|
"loss": 1.9989, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 1.955734133720398, |
|
"learning_rate": 1.7902869757174393e-05, |
|
"loss": 1.9733, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 2.044551372528076, |
|
"learning_rate": 1.7350993377483443e-05, |
|
"loss": 1.9574, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 2.117047071456909, |
|
"learning_rate": 1.6799116997792496e-05, |
|
"loss": 1.9654, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 2.02316951751709, |
|
"learning_rate": 1.6247240618101546e-05, |
|
"loss": 1.9498, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 2.2580747604370117, |
|
"learning_rate": 1.5695364238410596e-05, |
|
"loss": 1.9748, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 2.0818331241607666, |
|
"learning_rate": 1.5143487858719647e-05, |
|
"loss": 1.984, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 2.1940929889678955, |
|
"learning_rate": 1.4591611479028698e-05, |
|
"loss": 1.9646, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"grad_norm": 2.1697773933410645, |
|
"learning_rate": 1.4039735099337748e-05, |
|
"loss": 1.958, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"grad_norm": 2.0976200103759766, |
|
"learning_rate": 1.3487858719646799e-05, |
|
"loss": 1.946, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"grad_norm": 2.160543441772461, |
|
"learning_rate": 1.293598233995585e-05, |
|
"loss": 1.9398, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"grad_norm": 2.161280393600464, |
|
"learning_rate": 1.23841059602649e-05, |
|
"loss": 1.9008, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"grad_norm": 2.2662644386291504, |
|
"learning_rate": 1.183222958057395e-05, |
|
"loss": 1.9111, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"grad_norm": 2.374934673309326, |
|
"learning_rate": 1.1280353200883002e-05, |
|
"loss": 1.9087, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"grad_norm": 2.3813648223876953, |
|
"learning_rate": 1.0728476821192052e-05, |
|
"loss": 1.9116, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"grad_norm": 2.115699529647827, |
|
"learning_rate": 1.0176600441501102e-05, |
|
"loss": 1.92, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"grad_norm": 2.41668963432312, |
|
"learning_rate": 9.624724061810154e-06, |
|
"loss": 1.9452, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"grad_norm": 2.383557081222534, |
|
"learning_rate": 9.072847682119206e-06, |
|
"loss": 1.9209, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"grad_norm": 2.2272610664367676, |
|
"learning_rate": 8.520971302428258e-06, |
|
"loss": 1.9057, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"grad_norm": 2.5420844554901123, |
|
"learning_rate": 7.969094922737308e-06, |
|
"loss": 1.9025, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"grad_norm": 2.2384705543518066, |
|
"learning_rate": 7.417218543046358e-06, |
|
"loss": 1.9072, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"grad_norm": 2.2767913341522217, |
|
"learning_rate": 6.865342163355408e-06, |
|
"loss": 1.8947, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"grad_norm": 2.2728919982910156, |
|
"learning_rate": 6.313465783664459e-06, |
|
"loss": 1.8819, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"grad_norm": 2.4492197036743164, |
|
"learning_rate": 5.76158940397351e-06, |
|
"loss": 1.8914, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"grad_norm": 2.238704204559326, |
|
"learning_rate": 5.20971302428256e-06, |
|
"loss": 1.8723, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"grad_norm": 2.341646194458008, |
|
"learning_rate": 4.657836644591612e-06, |
|
"loss": 1.8808, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"grad_norm": 2.2996695041656494, |
|
"learning_rate": 4.105960264900663e-06, |
|
"loss": 1.88, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"grad_norm": 2.213731050491333, |
|
"learning_rate": 3.554083885209713e-06, |
|
"loss": 1.8597, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"grad_norm": 2.0925798416137695, |
|
"learning_rate": 3.002207505518764e-06, |
|
"loss": 1.853, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"grad_norm": 2.3262617588043213, |
|
"learning_rate": 2.450331125827815e-06, |
|
"loss": 1.8795, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"grad_norm": 2.3422157764434814, |
|
"learning_rate": 1.8984547461368651e-06, |
|
"loss": 1.865, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"grad_norm": 2.6429784297943115, |
|
"learning_rate": 1.3465783664459163e-06, |
|
"loss": 1.8871, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"grad_norm": 2.334404945373535, |
|
"learning_rate": 7.947019867549669e-07, |
|
"loss": 1.8886, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"grad_norm": 2.4886159896850586, |
|
"learning_rate": 2.428256070640177e-07, |
|
"loss": 1.8998, |
|
"step": 1500 |
|
} |
|
], |
|
"logging_steps": 25, |
|
"max_steps": 1510, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"total_flos": 7.427503857598464e+16, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|