{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9984301412872841, |
|
"eval_steps": 100, |
|
"global_step": 477, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 1.0416666666666667e-07, |
|
"logits/chosen": -2.2547454833984375, |
|
"logits/rejected": -2.401865005493164, |
|
"logps/chosen": -53.759212493896484, |
|
"logps/rejected": -48.83185958862305, |
|
"loss": 0.6931, |
|
"pred_label": 0.0, |
|
"rewards/accuracies": 0.0, |
|
"rewards/chosen": 0.0, |
|
"rewards/margins": 0.0, |
|
"rewards/rejected": 0.0, |
|
"step": 1, |
|
"use_label": 10.0 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 1.0416666666666667e-06, |
|
"logits/chosen": -2.2421205043792725, |
|
"logits/rejected": -2.2769112586975098, |
|
"logps/chosen": -51.97997283935547, |
|
"logps/rejected": -64.98096466064453, |
|
"loss": 0.6929, |
|
"pred_label": 0.0, |
|
"rewards/accuracies": 0.2222222238779068, |
|
"rewards/chosen": 0.0019939513877034187, |
|
"rewards/margins": 0.0007003004429861903, |
|
"rewards/rejected": 0.0012936509447172284, |
|
"step": 10, |
|
"use_label": 90.0 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 2.0833333333333334e-06, |
|
"logits/chosen": -2.2527966499328613, |
|
"logits/rejected": -2.256462812423706, |
|
"logps/chosen": -62.502418518066406, |
|
"logps/rejected": -72.6461181640625, |
|
"loss": 0.6919, |
|
"pred_label": 0.0, |
|
"rewards/accuracies": 0.2750000059604645, |
|
"rewards/chosen": 0.01591477356851101, |
|
"rewards/margins": 0.0011298481840640306, |
|
"rewards/rejected": 0.014784926548600197, |
|
"step": 20, |
|
"use_label": 242.0 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 3.125e-06, |
|
"logits/chosen": -2.342513084411621, |
|
"logits/rejected": -2.35528564453125, |
|
"logps/chosen": -79.1588134765625, |
|
"logps/rejected": -98.83000946044922, |
|
"loss": 0.6898, |
|
"pred_label": 0.0, |
|
"rewards/accuracies": 0.2750000059604645, |
|
"rewards/chosen": 0.030831044539809227, |
|
"rewards/margins": 0.002872847020626068, |
|
"rewards/rejected": 0.027958199381828308, |
|
"step": 30, |
|
"use_label": 402.0 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 4.166666666666667e-06, |
|
"logits/chosen": -2.322958469390869, |
|
"logits/rejected": -2.3010201454162598, |
|
"logps/chosen": -82.86949157714844, |
|
"logps/rejected": -82.41117858886719, |
|
"loss": 0.6866, |
|
"pred_label": 0.0, |
|
"rewards/accuracies": 0.29374998807907104, |
|
"rewards/chosen": 0.03322647884488106, |
|
"rewards/margins": 0.01188388466835022, |
|
"rewards/rejected": 0.021342596039175987, |
|
"step": 40, |
|
"use_label": 562.0 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.66015625, |
|
"learning_rate": 4.999731868769027e-06, |
|
"logits/chosen": -2.2394285202026367, |
|
"logits/rejected": -2.2620723247528076, |
|
"logps/chosen": -67.9144058227539, |
|
"logps/rejected": -81.85662841796875, |
|
"loss": 0.6805, |
|
"pred_label": 0.0, |
|
"rewards/accuracies": 0.32499998807907104, |
|
"rewards/chosen": 0.009164649061858654, |
|
"rewards/margins": 0.030334800481796265, |
|
"rewards/rejected": -0.021170150488615036, |
|
"step": 50, |
|
"use_label": 722.0 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 4.9903533134293035e-06, |
|
"logits/chosen": -2.215353488922119, |
|
"logits/rejected": -2.156195640563965, |
|
"logps/chosen": -62.76350784301758, |
|
"logps/rejected": -72.54745483398438, |
|
"loss": 0.6752, |
|
"pred_label": 0.0, |
|
"rewards/accuracies": 0.3125, |
|
"rewards/chosen": -0.030372655019164085, |
|
"rewards/margins": 0.04541187360882759, |
|
"rewards/rejected": -0.07578452676534653, |
|
"step": 60, |
|
"use_label": 882.0 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 4.967625656594782e-06, |
|
"logits/chosen": -2.1206488609313965, |
|
"logits/rejected": -2.117661952972412, |
|
"logps/chosen": -63.197784423828125, |
|
"logps/rejected": -76.79959869384766, |
|
"loss": 0.6656, |
|
"pred_label": 0.0, |
|
"rewards/accuracies": 0.24375000596046448, |
|
"rewards/chosen": -0.07486678659915924, |
|
"rewards/margins": 0.03511539101600647, |
|
"rewards/rejected": -0.10998217016458511, |
|
"step": 70, |
|
"use_label": 1042.0 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 4.93167072587771e-06, |
|
"logits/chosen": -2.209770679473877, |
|
"logits/rejected": -2.155240058898926, |
|
"logps/chosen": -62.25128936767578, |
|
"logps/rejected": -75.9639663696289, |
|
"loss": 0.6592, |
|
"pred_label": 1.6749999523162842, |
|
"rewards/accuracies": 0.2750000059604645, |
|
"rewards/chosen": -0.1372038871049881, |
|
"rewards/margins": 0.0906611904501915, |
|
"rewards/rejected": -0.227865070104599, |
|
"step": 80, |
|
"use_label": 1200.324951171875 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 4.882681251368549e-06, |
|
"logits/chosen": -2.0902276039123535, |
|
"logits/rejected": -2.0773346424102783, |
|
"logps/chosen": -77.01739501953125, |
|
"logps/rejected": -97.2451400756836, |
|
"loss": 0.6533, |
|
"pred_label": 7.675000190734863, |
|
"rewards/accuracies": 0.3062500059604645, |
|
"rewards/chosen": -0.18635347485542297, |
|
"rewards/margins": 0.09845630824565887, |
|
"rewards/rejected": -0.28480976819992065, |
|
"step": 90, |
|
"use_label": 1354.324951171875 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 4.8209198325401815e-06, |
|
"logits/chosen": -2.1706321239471436, |
|
"logits/rejected": -2.1635570526123047, |
|
"logps/chosen": -92.91756439208984, |
|
"logps/rejected": -83.92691802978516, |
|
"loss": 0.6535, |
|
"pred_label": 10.050000190734863, |
|
"rewards/accuracies": 0.3125, |
|
"rewards/chosen": -0.12887680530548096, |
|
"rewards/margins": 0.07172463834285736, |
|
"rewards/rejected": -0.20060142874717712, |
|
"step": 100, |
|
"use_label": 1511.949951171875 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"eval_logits/chosen": -2.0656356811523438, |
|
"eval_logits/rejected": -2.053668260574341, |
|
"eval_logps/chosen": -84.40631866455078, |
|
"eval_logps/rejected": -113.12586975097656, |
|
"eval_loss": 0.6432419419288635, |
|
"eval_pred_label": 18.5, |
|
"eval_rewards/accuracies": 0.3515625, |
|
"eval_rewards/chosen": -0.20494069159030914, |
|
"eval_rewards/margins": 0.1543886363506317, |
|
"eval_rewards/rejected": -0.35932934284210205, |
|
"eval_runtime": 125.4389, |
|
"eval_samples_per_second": 15.944, |
|
"eval_steps_per_second": 0.255, |
|
"eval_use_label": 1713.5, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 2.125, |
|
"learning_rate": 4.746717530629565e-06, |
|
"logits/chosen": -2.043905019760132, |
|
"logits/rejected": -2.0294950008392334, |
|
"logps/chosen": -101.01715850830078, |
|
"logps/rejected": -123.53236389160156, |
|
"loss": 0.6416, |
|
"pred_label": 28.799999237060547, |
|
"rewards/accuracies": 0.3499999940395355, |
|
"rewards/chosen": -0.2991637587547302, |
|
"rewards/margins": 0.15025287866592407, |
|
"rewards/rejected": -0.4494166374206543, |
|
"step": 110, |
|
"use_label": 1909.199951171875 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 4.660472094042121e-06, |
|
"logits/chosen": -1.6428496837615967, |
|
"logits/rejected": -1.5296450853347778, |
|
"logps/chosen": -109.25709533691406, |
|
"logps/rejected": -133.13401794433594, |
|
"loss": 0.6325, |
|
"pred_label": 42.32500076293945, |
|
"rewards/accuracies": 0.375, |
|
"rewards/chosen": -0.3931151032447815, |
|
"rewards/margins": 0.20903488993644714, |
|
"rewards/rejected": -0.602150022983551, |
|
"step": 120, |
|
"use_label": 2055.675048828125 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 4.5626458262912745e-06, |
|
"logits/chosen": -1.0915193557739258, |
|
"logits/rejected": -1.0688496828079224, |
|
"logps/chosen": -101.99517822265625, |
|
"logps/rejected": -131.51425170898438, |
|
"loss": 0.6265, |
|
"pred_label": 60.599998474121094, |
|
"rewards/accuracies": 0.3375000059604645, |
|
"rewards/chosen": -0.3621678650379181, |
|
"rewards/margins": 0.22451019287109375, |
|
"rewards/rejected": -0.5866780877113342, |
|
"step": 130, |
|
"use_label": 2197.39990234375 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 2.65625, |
|
"learning_rate": 4.453763107901676e-06, |
|
"logits/chosen": -0.5608137845993042, |
|
"logits/rejected": -0.7015228271484375, |
|
"logps/chosen": -131.24168395996094, |
|
"logps/rejected": -148.6112060546875, |
|
"loss": 0.6032, |
|
"pred_label": 81.6500015258789, |
|
"rewards/accuracies": 0.30000001192092896, |
|
"rewards/chosen": -0.5049411654472351, |
|
"rewards/margins": 0.19011390209197998, |
|
"rewards/rejected": -0.6950551271438599, |
|
"step": 140, |
|
"use_label": 2336.35009765625 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 3.828125, |
|
"learning_rate": 4.33440758555951e-06, |
|
"logits/chosen": -0.30833983421325684, |
|
"logits/rejected": -0.2849891781806946, |
|
"logps/chosen": -146.22640991210938, |
|
"logps/rejected": -189.76602172851562, |
|
"loss": 0.5689, |
|
"pred_label": 109.57499694824219, |
|
"rewards/accuracies": 0.3187499940395355, |
|
"rewards/chosen": -0.8107970952987671, |
|
"rewards/margins": 0.4084743559360504, |
|
"rewards/rejected": -1.2192714214324951, |
|
"step": 150, |
|
"use_label": 2468.425048828125 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 3.90625, |
|
"learning_rate": 4.205219043576955e-06, |
|
"logits/chosen": 0.23603327572345734, |
|
"logits/rejected": 0.16418711841106415, |
|
"logps/chosen": -161.76339721679688, |
|
"logps/rejected": -198.48782348632812, |
|
"loss": 0.5333, |
|
"pred_label": 145.22500610351562, |
|
"rewards/accuracies": 0.26875001192092896, |
|
"rewards/chosen": -0.9743334650993347, |
|
"rewards/margins": 0.23641912639141083, |
|
"rewards/rejected": -1.2107526063919067, |
|
"step": 160, |
|
"use_label": 2592.77490234375 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 3.453125, |
|
"learning_rate": 4.066889974440757e-06, |
|
"logits/chosen": 0.4600732922554016, |
|
"logits/rejected": 0.5158972144126892, |
|
"logps/chosen": -129.09141540527344, |
|
"logps/rejected": -170.87411499023438, |
|
"loss": 0.5489, |
|
"pred_label": 192.35000610351562, |
|
"rewards/accuracies": 0.32499998807907104, |
|
"rewards/chosen": -0.7358335256576538, |
|
"rewards/margins": 0.3306979537010193, |
|
"rewards/rejected": -1.0665314197540283, |
|
"step": 170, |
|
"use_label": 2705.64990234375 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 3.65625, |
|
"learning_rate": 3.92016186682789e-06, |
|
"logits/chosen": 0.6188533902168274, |
|
"logits/rejected": 0.7289873361587524, |
|
"logps/chosen": -150.7683868408203, |
|
"logps/rejected": -179.30160522460938, |
|
"loss": 0.5565, |
|
"pred_label": 230.1750030517578, |
|
"rewards/accuracies": 0.34375, |
|
"rewards/chosen": -0.8798072934150696, |
|
"rewards/margins": 0.35036540031433105, |
|
"rewards/rejected": -1.230172872543335, |
|
"step": 180, |
|
"use_label": 2827.824951171875 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 4.15625, |
|
"learning_rate": 3.7658212309857576e-06, |
|
"logits/chosen": 0.8427504301071167, |
|
"logits/rejected": 1.2680375576019287, |
|
"logps/chosen": -149.38197326660156, |
|
"logps/rejected": -201.4063262939453, |
|
"loss": 0.5452, |
|
"pred_label": 274.17498779296875, |
|
"rewards/accuracies": 0.3375000059604645, |
|
"rewards/chosen": -0.9027034044265747, |
|
"rewards/margins": 0.46735334396362305, |
|
"rewards/rejected": -1.3700568675994873, |
|
"step": 190, |
|
"use_label": 2943.824951171875 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 3.109375, |
|
"learning_rate": 3.604695382782159e-06, |
|
"logits/chosen": 1.0629971027374268, |
|
"logits/rejected": 1.2417268753051758, |
|
"logps/chosen": -202.11306762695312, |
|
"logps/rejected": -230.6200714111328, |
|
"loss": 0.507, |
|
"pred_label": 315.54998779296875, |
|
"rewards/accuracies": 0.29374998807907104, |
|
"rewards/chosen": -1.204367756843567, |
|
"rewards/margins": 0.37038713693618774, |
|
"rewards/rejected": -1.5747547149658203, |
|
"step": 200, |
|
"use_label": 3062.449951171875 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"eval_logits/chosen": 2.09150767326355, |
|
"eval_logits/rejected": 2.1625571250915527, |
|
"eval_logps/chosen": -231.13876342773438, |
|
"eval_logps/rejected": -311.849365234375, |
|
"eval_loss": 0.504833996295929, |
|
"eval_pred_label": 373.4375, |
|
"eval_rewards/accuracies": 0.359375, |
|
"eval_rewards/chosen": -1.6722650527954102, |
|
"eval_rewards/margins": 0.6742992401123047, |
|
"eval_rewards/rejected": -2.346564292907715, |
|
"eval_runtime": 125.4772, |
|
"eval_samples_per_second": 15.939, |
|
"eval_steps_per_second": 0.255, |
|
"eval_use_label": 3214.5625, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 2.859375, |
|
"learning_rate": 3.437648009023905e-06, |
|
"logits/chosen": 1.6046574115753174, |
|
"logits/rejected": 1.5769492387771606, |
|
"logps/chosen": -200.22195434570312, |
|
"logps/rejected": -262.50018310546875, |
|
"loss": 0.5179, |
|
"pred_label": 434.75, |
|
"rewards/accuracies": 0.3062500059604645, |
|
"rewards/chosen": -1.4424717426300049, |
|
"rewards/margins": 0.5527372360229492, |
|
"rewards/rejected": -1.995208740234375, |
|
"step": 210, |
|
"use_label": 3359.25 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 3.46875, |
|
"learning_rate": 3.265574537815398e-06, |
|
"logits/chosen": 1.2694753408432007, |
|
"logits/rejected": 1.4022200107574463, |
|
"logps/chosen": -256.5951232910156, |
|
"logps/rejected": -258.8177795410156, |
|
"loss": 0.495, |
|
"pred_label": 487.0, |
|
"rewards/accuracies": 0.28125, |
|
"rewards/chosen": -1.7696645259857178, |
|
"rewards/margins": 0.1548328697681427, |
|
"rewards/rejected": -1.9244972467422485, |
|
"step": 220, |
|
"use_label": 3467.0 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 4.0, |
|
"learning_rate": 3.089397338773569e-06, |
|
"logits/chosen": 1.3947041034698486, |
|
"logits/rejected": 1.5894306898117065, |
|
"logps/chosen": -181.77613830566406, |
|
"logps/rejected": -231.12332153320312, |
|
"loss": 0.518, |
|
"pred_label": 532.0499877929688, |
|
"rewards/accuracies": 0.2874999940395355, |
|
"rewards/chosen": -1.1734154224395752, |
|
"rewards/margins": 0.5102296471595764, |
|
"rewards/rejected": -1.683645248413086, |
|
"step": 230, |
|
"use_label": 3581.949951171875 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 3.765625, |
|
"learning_rate": 2.9100607788275547e-06, |
|
"logits/chosen": 1.8860304355621338, |
|
"logits/rejected": 1.7700283527374268, |
|
"logps/chosen": -187.30068969726562, |
|
"logps/rejected": -253.653564453125, |
|
"loss": 0.513, |
|
"pred_label": 577.4000244140625, |
|
"rewards/accuracies": 0.34375, |
|
"rewards/chosen": -1.1826814413070679, |
|
"rewards/margins": 0.6116172671318054, |
|
"rewards/rejected": -1.794298768043518, |
|
"step": 240, |
|
"use_label": 3696.60009765625 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 2.96875, |
|
"learning_rate": 2.72852616010567e-06, |
|
"logits/chosen": 1.8056014776229858, |
|
"logits/rejected": 1.8892968893051147, |
|
"logps/chosen": -222.9417266845703, |
|
"logps/rejected": -281.18231201171875, |
|
"loss": 0.5089, |
|
"pred_label": 626.8250122070312, |
|
"rewards/accuracies": 0.39375001192092896, |
|
"rewards/chosen": -1.4954261779785156, |
|
"rewards/margins": 0.6240721940994263, |
|
"rewards/rejected": -2.1194984912872314, |
|
"step": 250, |
|
"use_label": 3807.175048828125 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 2.78125, |
|
"learning_rate": 2.5457665670441937e-06, |
|
"logits/chosen": 2.1560091972351074, |
|
"logits/rejected": 2.1253867149353027, |
|
"logps/chosen": -209.4928741455078, |
|
"logps/rejected": -288.0691223144531, |
|
"loss": 0.4966, |
|
"pred_label": 678.25, |
|
"rewards/accuracies": 0.3375000059604645, |
|
"rewards/chosen": -1.4338725805282593, |
|
"rewards/margins": 0.7023388743400574, |
|
"rewards/rejected": -2.136211395263672, |
|
"step": 260, |
|
"use_label": 3915.75 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 7.0625, |
|
"learning_rate": 2.3627616503391813e-06, |
|
"logits/chosen": 2.430591583251953, |
|
"logits/rejected": 2.3143506050109863, |
|
"logps/chosen": -224.49380493164062, |
|
"logps/rejected": -281.049072265625, |
|
"loss": 0.4953, |
|
"pred_label": 728.0750122070312, |
|
"rewards/accuracies": 0.3062500059604645, |
|
"rewards/chosen": -1.5157802104949951, |
|
"rewards/margins": 0.600957453250885, |
|
"rewards/rejected": -2.1167378425598145, |
|
"step": 270, |
|
"use_label": 4025.925048828125 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 2.84375, |
|
"learning_rate": 2.1804923757009885e-06, |
|
"logits/chosen": 2.489325523376465, |
|
"logits/rejected": 2.6676642894744873, |
|
"logps/chosen": -211.58285522460938, |
|
"logps/rejected": -260.68853759765625, |
|
"loss": 0.5164, |
|
"pred_label": 778.6749877929688, |
|
"rewards/accuracies": 0.32499998807907104, |
|
"rewards/chosen": -1.4607734680175781, |
|
"rewards/margins": 0.5169156193733215, |
|
"rewards/rejected": -1.9776890277862549, |
|
"step": 280, |
|
"use_label": 4135.3251953125 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 3.890625, |
|
"learning_rate": 1.9999357655598894e-06, |
|
"logits/chosen": 2.1360135078430176, |
|
"logits/rejected": 2.1046082973480225, |
|
"logps/chosen": -214.5660400390625, |
|
"logps/rejected": -288.2430419921875, |
|
"loss": 0.4926, |
|
"pred_label": 830.5, |
|
"rewards/accuracies": 0.32499998807907104, |
|
"rewards/chosen": -1.4801760911941528, |
|
"rewards/margins": 0.6386412978172302, |
|
"rewards/rejected": -2.1188173294067383, |
|
"step": 290, |
|
"use_label": 4243.5 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 5.6875, |
|
"learning_rate": 1.8220596619089576e-06, |
|
"logits/chosen": 2.7119574546813965, |
|
"logits/rejected": 2.501838207244873, |
|
"logps/chosen": -269.5751953125, |
|
"logps/rejected": -370.7685852050781, |
|
"loss": 0.4799, |
|
"pred_label": 889.2750244140625, |
|
"rewards/accuracies": 0.36250001192092896, |
|
"rewards/chosen": -1.9198087453842163, |
|
"rewards/margins": 0.8466728329658508, |
|
"rewards/rejected": -2.766481876373291, |
|
"step": 300, |
|
"use_label": 4344.72509765625 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"eval_logits/chosen": 3.1510589122772217, |
|
"eval_logits/rejected": 3.222506284713745, |
|
"eval_logps/chosen": -242.9697723388672, |
|
"eval_logps/rejected": -343.4284973144531, |
|
"eval_loss": 0.48854950070381165, |
|
"eval_pred_label": 969.25, |
|
"eval_rewards/accuracies": 0.3359375, |
|
"eval_rewards/chosen": -1.7905751466751099, |
|
"eval_rewards/margins": 0.871780276298523, |
|
"eval_rewards/rejected": -2.662355422973633, |
|
"eval_runtime": 125.4501, |
|
"eval_samples_per_second": 15.943, |
|
"eval_steps_per_second": 0.255, |
|
"eval_use_label": 4474.75, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 2.6875, |
|
"learning_rate": 1.647817538357072e-06, |
|
"logits/chosen": 2.556201457977295, |
|
"logits/rejected": 2.59236478805542, |
|
"logps/chosen": -194.0545654296875, |
|
"logps/rejected": -294.6336975097656, |
|
"loss": 0.4854, |
|
"pred_label": 1040.375, |
|
"rewards/accuracies": 0.35624998807907104, |
|
"rewards/chosen": -1.3845796585083008, |
|
"rewards/margins": 0.8792532682418823, |
|
"rewards/rejected": -2.2638330459594727, |
|
"step": 310, |
|
"use_label": 4609.625 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 4.75, |
|
"learning_rate": 1.4781433892011132e-06, |
|
"logits/chosen": 2.2738680839538574, |
|
"logits/rejected": 2.5079522132873535, |
|
"logps/chosen": -225.6286163330078, |
|
"logps/rejected": -287.9954833984375, |
|
"loss": 0.4846, |
|
"pred_label": 1091.550048828125, |
|
"rewards/accuracies": 0.34375, |
|
"rewards/chosen": -1.5255814790725708, |
|
"rewards/margins": 0.6824158430099487, |
|
"rewards/rejected": -2.2079973220825195, |
|
"step": 320, |
|
"use_label": 4718.4501953125 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 5.9375, |
|
"learning_rate": 1.3139467229135999e-06, |
|
"logits/chosen": 2.808880567550659, |
|
"logits/rejected": 2.8045334815979004, |
|
"logps/chosen": -228.8271026611328, |
|
"logps/rejected": -294.6129150390625, |
|
"loss": 0.4772, |
|
"pred_label": 1143.199951171875, |
|
"rewards/accuracies": 0.3499999940395355, |
|
"rewards/chosen": -1.587189793586731, |
|
"rewards/margins": 0.6728593111038208, |
|
"rewards/rejected": -2.2600488662719727, |
|
"step": 330, |
|
"use_label": 4826.7998046875 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 2.765625, |
|
"learning_rate": 1.1561076868822756e-06, |
|
"logits/chosen": 2.5798306465148926, |
|
"logits/rejected": 2.4416697025299072, |
|
"logps/chosen": -287.6748352050781, |
|
"logps/rejected": -322.15899658203125, |
|
"loss": 0.4893, |
|
"pred_label": 1191.0999755859375, |
|
"rewards/accuracies": 0.2750000059604645, |
|
"rewards/chosen": -2.0293564796447754, |
|
"rewards/margins": 0.42713117599487305, |
|
"rewards/rejected": -2.4564874172210693, |
|
"step": 340, |
|
"use_label": 4938.89990234375 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 1.0054723495346484e-06, |
|
"logits/chosen": 2.786007881164551, |
|
"logits/rejected": 2.829763174057007, |
|
"logps/chosen": -366.9530944824219, |
|
"logps/rejected": -448.0956115722656, |
|
"loss": 0.4576, |
|
"pred_label": 1257.5250244140625, |
|
"rewards/accuracies": 0.3187499940395355, |
|
"rewards/chosen": -2.775364637374878, |
|
"rewards/margins": 0.8529101610183716, |
|
"rewards/rejected": -3.628274440765381, |
|
"step": 350, |
|
"use_label": 5032.47509765625 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 2.9375, |
|
"learning_rate": 8.628481651367876e-07, |
|
"logits/chosen": 2.9445724487304688, |
|
"logits/rejected": 3.1931867599487305, |
|
"logps/chosen": -301.0486145019531, |
|
"logps/rejected": -423.34130859375, |
|
"loss": 0.463, |
|
"pred_label": 1317.699951171875, |
|
"rewards/accuracies": 0.375, |
|
"rewards/chosen": -2.374109983444214, |
|
"rewards/margins": 1.1370208263397217, |
|
"rewards/rejected": -3.5111305713653564, |
|
"step": 360, |
|
"use_label": 5132.2998046875 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 4.15625, |
|
"learning_rate": 7.289996455765749e-07, |
|
"logits/chosen": 3.396423816680908, |
|
"logits/rejected": 3.5762810707092285, |
|
"logps/chosen": -308.2368469238281, |
|
"logps/rejected": -430.18524169921875, |
|
"loss": 0.4574, |
|
"pred_label": 1379.199951171875, |
|
"rewards/accuracies": 0.3499999940395355, |
|
"rewards/chosen": -2.4280202388763428, |
|
"rewards/margins": 1.1946780681610107, |
|
"rewards/rejected": -3.6226983070373535, |
|
"step": 370, |
|
"use_label": 5230.7998046875 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 5.90625, |
|
"learning_rate": 6.046442623320145e-07, |
|
"logits/chosen": 2.96122407913208, |
|
"logits/rejected": 2.9667248725891113, |
|
"logps/chosen": -357.73297119140625, |
|
"logps/rejected": -534.4653930664062, |
|
"loss": 0.4633, |
|
"pred_label": 1439.324951171875, |
|
"rewards/accuracies": 0.3062500059604645, |
|
"rewards/chosen": -2.918975830078125, |
|
"rewards/margins": 1.5620373487472534, |
|
"rewards/rejected": -4.481013298034668, |
|
"step": 380, |
|
"use_label": 5330.6748046875 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 6.625, |
|
"learning_rate": 4.904486005914027e-07, |
|
"logits/chosen": 3.662972927093506, |
|
"logits/rejected": 3.2519752979278564, |
|
"logps/chosen": -526.6204833984375, |
|
"logps/rejected": -657.4584350585938, |
|
"loss": 0.4314, |
|
"pred_label": 1506.574951171875, |
|
"rewards/accuracies": 0.32499998807907104, |
|
"rewards/chosen": -4.338021278381348, |
|
"rewards/margins": 1.406864047050476, |
|
"rewards/rejected": -5.744885444641113, |
|
"step": 390, |
|
"use_label": 5423.4248046875 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 3.71875, |
|
"learning_rate": 3.8702478614051353e-07, |
|
"logits/chosen": 3.13154935836792, |
|
"logits/rejected": 3.3134002685546875, |
|
"logps/chosen": -389.10174560546875, |
|
"logps/rejected": -441.57305908203125, |
|
"loss": 0.4443, |
|
"pred_label": 1566.699951171875, |
|
"rewards/accuracies": 0.3125, |
|
"rewards/chosen": -3.166107654571533, |
|
"rewards/margins": 0.6149949431419373, |
|
"rewards/rejected": -3.7811026573181152, |
|
"step": 400, |
|
"use_label": 5523.2998046875 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"eval_logits/chosen": 4.387378692626953, |
|
"eval_logits/rejected": 4.5207109451293945, |
|
"eval_logps/chosen": -371.9975891113281, |
|
"eval_logps/rejected": -566.3418579101562, |
|
"eval_loss": 0.44047147035598755, |
|
"eval_pred_label": 1650.28125, |
|
"eval_rewards/accuracies": 0.34375, |
|
"eval_rewards/chosen": -3.080853223800659, |
|
"eval_rewards/margins": 1.8106356859207153, |
|
"eval_rewards/rejected": -4.891489028930664, |
|
"eval_runtime": 125.426, |
|
"eval_samples_per_second": 15.946, |
|
"eval_steps_per_second": 0.255, |
|
"eval_use_label": 5649.71875, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 4.78125, |
|
"learning_rate": 2.9492720416985004e-07, |
|
"logits/chosen": 3.4670283794403076, |
|
"logits/rejected": 3.4501025676727295, |
|
"logps/chosen": -364.8808898925781, |
|
"logps/rejected": -489.7000427246094, |
|
"loss": 0.4407, |
|
"pred_label": 1735.4749755859375, |
|
"rewards/accuracies": 0.33125001192092896, |
|
"rewards/chosen": -3.034578800201416, |
|
"rewards/margins": 1.265229344367981, |
|
"rewards/rejected": -4.299808025360107, |
|
"step": 410, |
|
"use_label": 5770.52490234375 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 2.1464952759020857e-07, |
|
"logits/chosen": 3.780207872390747, |
|
"logits/rejected": 3.6938164234161377, |
|
"logps/chosen": -389.7974548339844, |
|
"logps/rejected": -390.455078125, |
|
"loss": 0.46, |
|
"pred_label": 1791.824951171875, |
|
"rewards/accuracies": 0.20624999701976776, |
|
"rewards/chosen": -3.288583278656006, |
|
"rewards/margins": 0.10213696956634521, |
|
"rewards/rejected": -3.3907198905944824, |
|
"step": 420, |
|
"use_label": 5874.1748046875 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 4.625, |
|
"learning_rate": 1.4662207078575685e-07, |
|
"logits/chosen": 3.7266852855682373, |
|
"logits/rejected": 3.6461174488067627, |
|
"logps/chosen": -460.2088928222656, |
|
"logps/rejected": -562.7196655273438, |
|
"loss": 0.4448, |
|
"pred_label": 1855.1500244140625, |
|
"rewards/accuracies": 0.39375001192092896, |
|
"rewards/chosen": -3.6094202995300293, |
|
"rewards/margins": 1.2242047786712646, |
|
"rewards/rejected": -4.833625316619873, |
|
"step": 430, |
|
"use_label": 5970.85009765625 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 3.953125, |
|
"learning_rate": 9.120948298936422e-08, |
|
"logits/chosen": 3.6001758575439453, |
|
"logits/rejected": 3.7878482341766357, |
|
"logps/chosen": -407.4084167480469, |
|
"logps/rejected": -561.12744140625, |
|
"loss": 0.4359, |
|
"pred_label": 1919.800048828125, |
|
"rewards/accuracies": 0.3125, |
|
"rewards/chosen": -3.4050445556640625, |
|
"rewards/margins": 1.4556002616882324, |
|
"rewards/rejected": -4.860644817352295, |
|
"step": 440, |
|
"use_label": 6066.2001953125 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 3.65625, |
|
"learning_rate": 4.870879364444109e-08, |
|
"logits/chosen": 4.037863254547119, |
|
"logits/rejected": 3.809945583343506, |
|
"logps/chosen": -381.8726501464844, |
|
"logps/rejected": -569.0548706054688, |
|
"loss": 0.4494, |
|
"pred_label": 1975.4000244140625, |
|
"rewards/accuracies": 0.35624998807907104, |
|
"rewards/chosen": -3.096148729324341, |
|
"rewards/margins": 1.7115033864974976, |
|
"rewards/rejected": -4.807651996612549, |
|
"step": 450, |
|
"use_label": 6170.60009765625 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 4.34375, |
|
"learning_rate": 1.93478202307823e-08, |
|
"logits/chosen": 3.7793803215026855, |
|
"logits/rejected": 3.7878577709198, |
|
"logps/chosen": -287.76025390625, |
|
"logps/rejected": -431.52020263671875, |
|
"loss": 0.4442, |
|
"pred_label": 2037.375, |
|
"rewards/accuracies": 0.2750000059604645, |
|
"rewards/chosen": -2.4164037704467773, |
|
"rewards/margins": 1.265346646308899, |
|
"rewards/rejected": -3.681750535964966, |
|
"step": 460, |
|
"use_label": 6268.625 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 5.96875, |
|
"learning_rate": 3.283947088983663e-09, |
|
"logits/chosen": 3.807328701019287, |
|
"logits/rejected": 3.645596981048584, |
|
"logps/chosen": -342.3217468261719, |
|
"logps/rejected": -512.537109375, |
|
"loss": 0.4555, |
|
"pred_label": 2090.22509765625, |
|
"rewards/accuracies": 0.32499998807907104, |
|
"rewards/chosen": -2.7499260902404785, |
|
"rewards/margins": 1.6848747730255127, |
|
"rewards/rejected": -4.4348015785217285, |
|
"step": 470, |
|
"use_label": 6375.77490234375 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 477, |
|
"total_flos": 0.0, |
|
"train_loss": 0.5378267840019562, |
|
"train_runtime": 9600.9753, |
|
"train_samples_per_second": 6.368, |
|
"train_steps_per_second": 0.05 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 477, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"total_flos": 0.0, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |