lm1-misc-pile/619m2b72b7/evaluation/rankeval/lm1-619m-2b7-results_lm-eval_global_step5111_2023-01-24-13-57-06_3shots.json
{
  "results": {
    "anli_r1": {
      "acc": 0.325,
      "acc_stderr": 0.014818724459095524
    },
    "anli_r2": {
      "acc": 0.347,
      "acc_stderr": 0.01506047203170662
    },
    "anli_r3": {
      "acc": 0.32916666666666666,
      "acc_stderr": 0.01357080625843363
    },
    "cb": {
      "acc": 0.42857142857142855,
      "acc_stderr": 0.06672848092813058,
      "f1": 0.29871345029239765
    },
    "copa": {
      "acc": 0.6,
      "acc_stderr": 0.049236596391733084
    },
    "hellaswag": {
      "acc": 0.2704640509858594,
      "acc_stderr": 0.004432917403755054,
      "acc_norm": 0.2746464847639912,
      "acc_norm_stderr": 0.004454237797448344
    },
    "rte": {
      "acc": 0.5090252707581228,
      "acc_stderr": 0.030091559826331334
    },
    "winogrande": {
      "acc": 0.5146014206787688,
      "acc_stderr": 0.014046492383275832
    },
    "storycloze_2016": {
      "acc": 0.5334045964724746,
      "acc_stderr": 0.011536599118298168
    },
    "boolq": {
      "acc": 0.5926605504587156,
      "acc_stderr": 0.00859357330260705
    },
    "arc_easy": {
      "acc": 0.35774410774410775,
      "acc_stderr": 0.00983577275734336,
      "acc_norm": 0.351010101010101,
      "acc_norm_stderr": 0.009793703885101049
    },
    "arc_challenge": {
      "acc": 0.16296928327645052,
      "acc_stderr": 0.010793073338782481,
      "acc_norm": 0.19368600682593856,
      "acc_norm_stderr": 0.01154842540997854
    },
    "sciq": {
      "acc": 0.656,
      "acc_stderr": 0.015029633724408943,
      "acc_norm": 0.623,
      "acc_norm_stderr": 0.01533317012577987
    },
    "piqa": {
      "acc": 0.5870511425462459,
      "acc_stderr": 0.011487658725079095,
      "acc_norm": 0.5756256800870512,
      "acc_norm_stderr": 0.011531612758871062
    }
  },
  "versions": {
    "anli_r1": 0,
    "anli_r2": 0,
    "anli_r3": 0,
    "cb": 1,
    "copa": 0,
    "hellaswag": 0,
    "rte": 0,
    "winogrande": 0,
    "storycloze_2016": 0,
    "boolq": 1,
    "arc_easy": 0,
    "arc_challenge": 0,
    "sciq": 0,
    "piqa": 0
  }
}