Brando Miranda
Initial commit to push model at path_local_hub_repo = /lfs/skampere1/0/brando9/data/maf_data/results_2023-m09-d29-t11h_23m_01s/hub-checkpoint-6000
6d7c824
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 12.818800907998398,
  "eval_steps": 500,
  "global_step": 6000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.11,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.8267,
      "step": 50
    },
    {
      "epoch": 0.21,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.6077,
      "step": 100
    },
    {
      "epoch": 0.32,
      "learning_rate": 3e-06,
      "loss": 0.3982,
      "step": 150
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.3004,
      "step": 200
    },
    {
      "epoch": 0.53,
      "learning_rate": 5e-06,
      "loss": 0.2925,
      "step": 250
    },
    {
      "epoch": 0.64,
      "learning_rate": 6e-06,
      "loss": 0.2492,
      "step": 300
    },
    {
      "epoch": 0.75,
      "learning_rate": 7e-06,
      "loss": 0.2615,
      "step": 350
    },
    {
      "epoch": 0.85,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.2496,
      "step": 400
    },
    {
      "epoch": 0.96,
      "learning_rate": 9e-06,
      "loss": 0.2456,
      "step": 450
    },
    {
      "epoch": 1.07,
      "learning_rate": 1e-05,
      "loss": 0.2304,
      "step": 500
    },
    {
      "epoch": 1.18,
      "learning_rate": 9.998737209226261e-06,
      "loss": 0.2353,
      "step": 550
    },
    {
      "epoch": 1.28,
      "learning_rate": 9.994949474761258e-06,
      "loss": 0.2178,
      "step": 600
    },
    {
      "epoch": 1.39,
      "learning_rate": 9.988638709851444e-06,
      "loss": 0.2347,
      "step": 650
    },
    {
      "epoch": 1.5,
      "learning_rate": 9.9798081021671e-06,
      "loss": 0.233,
      "step": 700
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.968462112192194e-06,
      "loss": 0.2157,
      "step": 750
    },
    {
      "epoch": 1.71,
      "learning_rate": 9.954606470971309e-06,
      "loss": 0.2296,
      "step": 800
    },
    {
      "epoch": 1.82,
      "learning_rate": 9.9382481772148e-06,
      "loss": 0.2192,
      "step": 850
    },
    {
      "epoch": 1.92,
      "learning_rate": 9.919395493763642e-06,
      "loss": 0.2181,
      "step": 900
    },
    {
      "epoch": 2.03,
      "learning_rate": 9.898057943415722e-06,
      "loss": 0.2032,
      "step": 950
    },
    {
      "epoch": 2.14,
      "learning_rate": 9.874246304115728e-06,
      "loss": 0.1536,
      "step": 1000
    },
    {
      "epoch": 2.24,
      "learning_rate": 9.847972603511025e-06,
      "loss": 0.167,
      "step": 1050
    },
    {
      "epoch": 2.35,
      "learning_rate": 9.8192501128763e-06,
      "loss": 0.1455,
      "step": 1100
    },
    {
      "epoch": 2.46,
      "learning_rate": 9.78809334041002e-06,
      "loss": 0.1488,
      "step": 1150
    },
    {
      "epoch": 2.56,
      "learning_rate": 9.754518023906113e-06,
      "loss": 0.1488,
      "step": 1200
    },
    {
      "epoch": 2.67,
      "learning_rate": 9.718541122804537e-06,
      "loss": 0.1571,
      "step": 1250
    },
    {
      "epoch": 2.78,
      "learning_rate": 9.680180809624806e-06,
      "loss": 0.1556,
      "step": 1300
    },
    {
      "epoch": 2.88,
      "learning_rate": 9.639456460786743e-06,
      "loss": 0.1468,
      "step": 1350
    },
    {
      "epoch": 2.99,
      "learning_rate": 9.59638864682314e-06,
      "loss": 0.1577,
      "step": 1400
    },
    {
      "epoch": 3.1,
      "learning_rate": 9.550999121989246e-06,
      "loss": 0.0985,
      "step": 1450
    },
    {
      "epoch": 3.2,
      "learning_rate": 9.503310813274334e-06,
      "loss": 0.0786,
      "step": 1500
    },
    {
      "epoch": 3.31,
      "learning_rate": 9.453347808820907e-06,
      "loss": 0.088,
      "step": 1550
    },
    {
      "epoch": 3.42,
      "learning_rate": 9.401135345757387e-06,
      "loss": 0.0828,
      "step": 1600
    },
    {
      "epoch": 3.53,
      "learning_rate": 9.346699797450427e-06,
      "loss": 0.086,
      "step": 1650
    },
    {
      "epoch": 3.63,
      "learning_rate": 9.290068660183292e-06,
      "loss": 0.0863,
      "step": 1700
    },
    {
      "epoch": 3.74,
      "learning_rate": 9.23127053926704e-06,
      "loss": 0.0924,
      "step": 1750
    },
    {
      "epoch": 3.85,
      "learning_rate": 9.170335134591516e-06,
      "loss": 0.0941,
      "step": 1800
    },
    {
      "epoch": 3.95,
      "learning_rate": 9.107293225623445e-06,
      "loss": 0.0897,
      "step": 1850
    },
    {
      "epoch": 4.06,
      "learning_rate": 9.042176655859231e-06,
      "loss": 0.0622,
      "step": 1900
    },
    {
      "epoch": 4.17,
      "learning_rate": 8.975018316740278e-06,
      "loss": 0.0388,
      "step": 1950
    },
    {
      "epoch": 4.27,
      "learning_rate": 8.905852131038996e-06,
      "loss": 0.0424,
      "step": 2000
    },
    {
      "epoch": 4.38,
      "learning_rate": 8.834713035723846e-06,
      "loss": 0.0463,
      "step": 2050
    },
    {
      "epoch": 4.49,
      "learning_rate": 8.761636964312114e-06,
      "loss": 0.0436,
      "step": 2100
    },
    {
      "epoch": 4.59,
      "learning_rate": 8.686660828719309e-06,
      "loss": 0.0449,
      "step": 2150
    },
    {
      "epoch": 4.7,
      "learning_rate": 8.609822500614336e-06,
      "loss": 0.0441,
      "step": 2200
    },
    {
      "epoch": 4.81,
      "learning_rate": 8.531160792289917e-06,
      "loss": 0.0438,
      "step": 2250
    },
    {
      "epoch": 4.91,
      "learning_rate": 8.45071543705786e-06,
      "loss": 0.0458,
      "step": 2300
    },
    {
      "epoch": 5.02,
      "learning_rate": 8.368527069179117e-06,
      "loss": 0.0399,
      "step": 2350
    },
    {
      "epoch": 5.13,
      "learning_rate": 8.284637203338753e-06,
      "loss": 0.0211,
      "step": 2400
    },
    {
      "epoch": 5.23,
      "learning_rate": 8.199088213676205e-06,
      "loss": 0.0215,
      "step": 2450
    },
    {
      "epoch": 5.34,
      "learning_rate": 8.111923312381413e-06,
      "loss": 0.0223,
      "step": 2500
    },
    {
      "epoch": 5.45,
      "learning_rate": 8.023186527867637e-06,
      "loss": 0.0217,
      "step": 2550
    },
    {
      "epoch": 5.55,
      "learning_rate": 7.932922682531986e-06,
      "loss": 0.0229,
      "step": 2600
    },
    {
      "epoch": 5.66,
      "learning_rate": 7.841177370114898e-06,
      "loss": 0.0223,
      "step": 2650
    },
    {
      "epoch": 5.77,
      "learning_rate": 7.747996932669996e-06,
      "loss": 0.0222,
      "step": 2700
    },
    {
      "epoch": 5.88,
      "learning_rate": 7.653428437155956e-06,
      "loss": 0.023,
      "step": 2750
    },
    {
      "epoch": 5.98,
      "learning_rate": 7.557519651662229e-06,
      "loss": 0.0218,
      "step": 2800
    },
    {
      "epoch": 6.09,
      "learning_rate": 7.460319021280591e-06,
      "loss": 0.0148,
      "step": 2850
    },
    {
      "epoch": 6.2,
      "learning_rate": 7.3618756436347415e-06,
      "loss": 0.013,
      "step": 2900
    },
    {
      "epoch": 6.3,
      "learning_rate": 7.262239244080291e-06,
      "loss": 0.013,
      "step": 2950
    },
    {
      "epoch": 6.41,
      "learning_rate": 7.161460150587674e-06,
      "loss": 0.0129,
      "step": 3000
    },
    {
      "epoch": 6.52,
      "learning_rate": 7.05958926832067e-06,
      "loss": 0.0129,
      "step": 3050
    },
    {
      "epoch": 6.62,
      "learning_rate": 6.956678053923373e-06,
      "loss": 0.0137,
      "step": 3100
    },
    {
      "epoch": 6.73,
      "learning_rate": 6.852778489528608e-06,
      "loss": 0.0133,
      "step": 3150
    },
    {
      "epoch": 6.84,
      "learning_rate": 6.7479430565008986e-06,
      "loss": 0.013,
      "step": 3200
    },
    {
      "epoch": 6.94,
      "learning_rate": 6.642224708927281e-06,
      "loss": 0.0141,
      "step": 3250
    },
    {
      "epoch": 7.05,
      "learning_rate": 6.535676846869327e-06,
      "loss": 0.0109,
      "step": 3300
    },
    {
      "epoch": 7.16,
      "learning_rate": 6.428353289389902e-06,
      "loss": 0.0076,
      "step": 3350
    },
    {
      "epoch": 7.26,
      "learning_rate": 6.320308247368285e-06,
      "loss": 0.0077,
      "step": 3400
    },
    {
      "epoch": 7.37,
      "learning_rate": 6.2115962961173616e-06,
      "loss": 0.0075,
      "step": 3450
    },
    {
      "epoch": 7.48,
      "learning_rate": 6.1022723478167445e-06,
      "loss": 0.0076,
      "step": 3500
    },
    {
      "epoch": 7.58,
      "learning_rate": 5.992391623775738e-06,
      "loss": 0.0081,
      "step": 3550
    },
    {
      "epoch": 7.69,
      "learning_rate": 5.882009626540155e-06,
      "loss": 0.0081,
      "step": 3600
    },
    {
      "epoch": 7.8,
      "learning_rate": 5.771182111857075e-06,
      "loss": 0.0076,
      "step": 3650
    },
    {
      "epoch": 7.9,
      "learning_rate": 5.659965060511705e-06,
      "loss": 0.0078,
      "step": 3700
    },
    {
      "epoch": 8.01,
      "learning_rate": 5.5484146500505735e-06,
      "loss": 0.0074,
      "step": 3750
    },
    {
      "epoch": 8.12,
      "learning_rate": 5.436587226405333e-06,
      "loss": 0.0047,
      "step": 3800
    },
    {
      "epoch": 8.23,
      "learning_rate": 5.324539275431518e-06,
      "loss": 0.0046,
      "step": 3850
    },
    {
      "epoch": 8.33,
      "learning_rate": 5.212327394376612e-06,
      "loss": 0.0045,
      "step": 3900
    },
    {
      "epoch": 8.44,
      "learning_rate": 5.100008263291852e-06,
      "loss": 0.0046,
      "step": 3950
    },
    {
      "epoch": 8.55,
      "learning_rate": 4.987638616402219e-06,
      "loss": 0.0046,
      "step": 4000
    },
    {
      "epoch": 8.65,
      "learning_rate": 4.875275213449051e-06,
      "loss": 0.0046,
      "step": 4050
    },
    {
      "epoch": 8.76,
      "learning_rate": 4.762974811019766e-06,
      "loss": 0.0045,
      "step": 4100
    },
    {
      "epoch": 8.87,
      "learning_rate": 4.650794133879198e-06,
      "loss": 0.0049,
      "step": 4150
    },
    {
      "epoch": 8.97,
      "learning_rate": 4.538789846316977e-06,
      "loss": 0.0047,
      "step": 4200
    },
    {
      "epoch": 9.08,
      "learning_rate": 4.4270185235254904e-06,
      "loss": 0.0037,
      "step": 4250
    },
    {
      "epoch": 9.19,
      "learning_rate": 4.3155366230228055e-06,
      "loss": 0.0034,
      "step": 4300
    },
    {
      "epoch": 9.29,
      "learning_rate": 4.204400456135086e-06,
      "loss": 0.0033,
      "step": 4350
    },
    {
      "epoch": 9.4,
      "learning_rate": 4.093666159552802e-06,
      "loss": 0.0032,
      "step": 4400
    },
    {
      "epoch": 9.51,
      "learning_rate": 3.983389666975174e-06,
      "loss": 0.0033,
      "step": 4450
    },
    {
      "epoch": 9.61,
      "learning_rate": 3.873626680857161e-06,
      "loss": 0.0033,
      "step": 4500
    },
    {
      "epoch": 9.72,
      "learning_rate": 3.7644326442732255e-06,
      "loss": 0.0033,
      "step": 4550
    },
    {
      "epoch": 9.83,
      "learning_rate": 3.6558627129121506e-06,
      "loss": 0.0033,
      "step": 4600
    },
    {
      "epoch": 9.93,
      "learning_rate": 3.547971727216982e-06,
      "loss": 0.0033,
      "step": 4650
    },
    {
      "epoch": 10.04,
      "learning_rate": 3.4408141846842493e-06,
      "loss": 0.0035,
      "step": 4700
    },
    {
      "epoch": 10.15,
      "learning_rate": 3.334444212336364e-06,
      "loss": 0.0029,
      "step": 4750
    },
    {
      "epoch": 10.26,
      "learning_rate": 3.2289155393812045e-06,
      "loss": 0.0033,
      "step": 4800
    },
    {
      "epoch": 10.36,
      "learning_rate": 3.1242814700725977e-06,
      "loss": 0.0029,
      "step": 4850
    },
    {
      "epoch": 10.47,
      "learning_rate": 3.0205948567854815e-06,
      "loss": 0.003,
      "step": 4900
    },
    {
      "epoch": 10.58,
      "learning_rate": 2.917908073319302e-06,
      "loss": 0.0031,
      "step": 4950
    },
    {
      "epoch": 10.68,
      "learning_rate": 2.816272988443158e-06,
      "loss": 0.003,
      "step": 5000
    },
    {
      "epoch": 10.79,
      "learning_rate": 2.71574093969604e-06,
      "loss": 0.003,
      "step": 5050
    },
    {
      "epoch": 10.9,
      "learning_rate": 2.6163627074553954e-06,
      "loss": 0.003,
      "step": 5100
    },
    {
      "epoch": 11.0,
      "learning_rate": 2.5181884892871378e-06,
      "loss": 0.003,
      "step": 5150
    },
    {
      "epoch": 11.11,
      "learning_rate": 2.4212678745900336e-06,
      "loss": 0.0032,
      "step": 5200
    },
    {
      "epoch": 11.22,
      "learning_rate": 2.3256498195472966e-06,
      "loss": 0.0028,
      "step": 5250
    },
    {
      "epoch": 11.32,
      "learning_rate": 2.231382622398008e-06,
      "loss": 0.0028,
      "step": 5300
    },
    {
      "epoch": 11.43,
      "learning_rate": 2.1385138990409037e-06,
      "loss": 0.0029,
      "step": 5350
    },
    {
      "epoch": 11.54,
      "learning_rate": 2.0470905589827853e-06,
      "loss": 0.0029,
      "step": 5400
    },
    {
      "epoch": 11.64,
      "learning_rate": 1.957158781643792e-06,
      "loss": 0.0029,
      "step": 5450
    },
    {
      "epoch": 11.75,
      "learning_rate": 1.8687639930313944e-06,
      "loss": 0.0029,
      "step": 5500
    },
    {
      "epoch": 11.86,
      "learning_rate": 1.781950842794999e-06,
      "loss": 0.0029,
      "step": 5550
    },
    {
      "epoch": 11.96,
      "learning_rate": 1.6967631816726676e-06,
      "loss": 0.0029,
      "step": 5600
    },
    {
      "epoch": 12.07,
      "learning_rate": 1.613244039341399e-06,
      "loss": 0.0029,
      "step": 5650
    },
    {
      "epoch": 12.18,
      "learning_rate": 1.5314356026821426e-06,
      "loss": 0.0027,
      "step": 5700
    },
    {
      "epoch": 12.28,
      "learning_rate": 1.451379194470507e-06,
      "loss": 0.0028,
      "step": 5750
    },
    {
      "epoch": 12.39,
      "learning_rate": 1.3731152525039627e-06,
      "loss": 0.0028,
      "step": 5800
    },
    {
      "epoch": 12.5,
      "learning_rate": 1.29668330917604e-06,
      "loss": 0.0028,
      "step": 5850
    },
    {
      "epoch": 12.61,
      "learning_rate": 1.2221219715078808e-06,
      "loss": 0.0028,
      "step": 5900
    },
    {
      "epoch": 12.71,
      "learning_rate": 1.1494689016471977e-06,
      "loss": 0.0028,
      "step": 5950
    },
    {
      "epoch": 12.82,
      "learning_rate": 1.0787607978445153e-06,
      "loss": 0.003,
      "step": 6000
    }
  ],
  "logging_steps": 50,
  "max_steps": 7489,
  "num_train_epochs": 17,
  "save_steps": 2000,
  "total_flos": 3.355230384370483e+19,
  "trial_name": null,
  "trial_params": null
}
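A minimal sketch of reading this checkpoint state, assuming the file above is saved under its usual Hugging Face Trainer name, trainer_state.json, inside the pushed checkpoint directory (file name and path are assumptions, not confirmed by the commit). It pulls the logged (step, loss) pairs out of log_history, which is the data a loss-curve plot would use.

import json

# Assumed local path to the file shown above; adjust to the actual checkpoint directory.
with open("trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry here carries epoch, learning_rate, loss, and step.
pairs = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]

print(f"global_step={state['global_step']}, epoch={state['epoch']:.2f}")
print(f"first logged loss: {pairs[0]}")   # e.g. (50, 0.8267)
print(f"last logged loss:  {pairs[-1]}")  # e.g. (6000, 0.003)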