{
"results": {
"anli_r1": {
"acc": 0.322,
"acc_stderr": 0.014782913600996667
},
"anli_r2": {
"acc": 0.335,
"acc_stderr": 0.014933117490932573
},
"anli_r3": {
"acc": 0.3333333333333333,
"acc_stderr": 0.013613950010225606
},
"cb": {
"acc": 0.4107142857142857,
"acc_stderr": 0.0663363415035954,
"f1": 0.2751322751322751
},
"copa": {
"acc": 0.62,
"acc_stderr": 0.048783173121456316
},
"hellaswag": {
"acc": 0.26837283409679347,
"acc_stderr": 0.004422070927212535,
"acc_norm": 0.27504481179047996,
"acc_norm_stderr": 0.004456242601950632
},
"rte": {
"acc": 0.5415162454873647,
"acc_stderr": 0.02999253538537331
},
"winogrande": {
"acc": 0.5240726124704025,
"acc_stderr": 0.014036189665395132
},
"storycloze_2016": {
"acc": 0.5451630144307856,
"acc_stderr": 0.011515167912227987
},
"boolq": {
"acc": 0.5452599388379205,
"acc_stderr": 0.00870915345549762
},
"arc_easy": {
"acc": 0.380050505050505,
"acc_stderr": 0.009960175831493116,
"acc_norm": 0.36363636363636365,
"acc_norm_stderr": 0.009870849346011767
},
"arc_challenge": {
"acc": 0.1757679180887372,
"acc_stderr": 0.011122850863120485,
"acc_norm": 0.2158703071672355,
"acc_norm_stderr": 0.012022975360030663
},
"sciq": {
"acc": 0.707,
"acc_stderr": 0.014399942998441271,
"acc_norm": 0.659,
"acc_norm_stderr": 0.014998131348402697
},
"piqa": {
"acc": 0.5984766050054406,
"acc_stderr": 0.01143732437339785,
"acc_norm": 0.5903155603917302,
"acc_norm_stderr": 0.011473932007187613
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}