lm1-misc-pile/146m14b14b/evaluation/rankeval/lm1-146m-14b-results_lm-eval_global_step21553_2023-01-24-17-05-33_5shots.json
{
  "results": {
    "anli_r1": {
      "acc": 0.343,
      "acc_stderr": 0.015019206922356951
    },
    "anli_r2": {
      "acc": 0.338,
      "acc_stderr": 0.014965960710224487
    },
    "anli_r3": {
      "acc": 0.3333333333333333,
      "acc_stderr": 0.013613950010225598
    },
    "cb": {
      "acc": 0.5178571428571429,
      "acc_stderr": 0.06737697508644648,
      "f1": 0.3607787942763312
    },
    "copa": {
      "acc": 0.62,
      "acc_stderr": 0.048783173121456316
    },
    "hellaswag": {
      "acc": 0.26976697868950406,
      "acc_stderr": 0.004429315788310528,
      "acc_norm": 0.2778331009759012,
      "acc_norm_stderr": 0.004470152081675125
    },
    "rte": {
      "acc": 0.5667870036101083,
      "acc_stderr": 0.029826764082138264
    },
    "winogrande": {
      "acc": 0.5240726124704025,
      "acc_stderr": 0.014036189665395125
    },
    "storycloze_2016": {
      "acc": 0.5547835382148584,
      "acc_stderr": 0.01149281951929236
    },
    "boolq": {
      "acc": 0.609480122324159,
      "acc_stderr": 0.008532845556631466
    },
    "arc_easy": {
      "acc": 0.3968855218855219,
      "acc_stderr": 0.010039236800583202,
      "acc_norm": 0.36784511784511786,
      "acc_norm_stderr": 0.009894923464455196
    },
    "arc_challenge": {
      "acc": 0.1680887372013652,
      "acc_stderr": 0.01092771504612486,
      "acc_norm": 0.21416382252559726,
      "acc_norm_stderr": 0.011988383205966494
    },
    "sciq": {
      "acc": 0.737,
      "acc_stderr": 0.013929286594259715,
      "acc_norm": 0.715,
      "acc_norm_stderr": 0.014282120955200478
    },
    "piqa": {
      "acc": 0.5908596300326442,
      "acc_stderr": 0.011471593460443316,
      "acc_norm": 0.588683351468988,
      "acc_norm_stderr": 0.011480860577192817
    }
  },
  "versions": {
    "anli_r1": 0,
    "anli_r2": 0,
    "anli_r3": 0,
    "cb": 1,
    "copa": 0,
    "hellaswag": 0,
    "rte": 0,
    "winogrande": 0,
    "storycloze_2016": 0,
    "boolq": 1,
    "arc_easy": 0,
    "arc_challenge": 0,
    "sciq": 0,
    "piqa": 0
  }
}