lm1-misc-pile / 1b58b88b8 / 1b58b88b8pile / evaluation / rankeval / lm1-1b5-8b8-results_lm-eval_global_step16765_2023-01-22-18-55-55_0shots.json
{
"results": {
"anli_r1": {
"acc": 0.325,
"acc_stderr": 0.014818724459095524
},
"anli_r2": {
"acc": 0.345,
"acc_stderr": 0.015039986742055233
},
"anli_r3": {
"acc": 0.3516666666666667,
"acc_stderr": 0.013789711695404798
},
"cb": {
"acc": 0.4642857142857143,
"acc_stderr": 0.0672477765493766,
"f1": 0.324184846352334
},
"copa": {
"acc": 0.63,
"acc_stderr": 0.04852365870939099
},
"hellaswag": {
"acc": 0.2862975502887871,
"acc_stderr": 0.004511063351278702,
"acc_norm": 0.30501892053375823,
"acc_norm_stderr": 0.004594744821762281
},
"rte": {
"acc": 0.4620938628158845,
"acc_stderr": 0.030009848912529113
},
"winogrande": {
"acc": 0.5027624309392266,
"acc_stderr": 0.014052271211616436
},
"storycloze_2016": {
"acc": 0.5788348476750401,
"acc_stderr": 0.011417808278216117
},
"boolq": {
"acc": 0.5235474006116208,
"acc_stderr": 0.008735351675636605
},
"arc_easy": {
"acc": 0.4537037037037037,
"acc_stderr": 0.010215708295494128,
"acc_norm": 0.4036195286195286,
"acc_norm_stderr": 0.010067368960348204
},
"arc_challenge": {
"acc": 0.19539249146757678,
"acc_stderr": 0.011586907189952911,
"acc_norm": 0.24658703071672355,
"acc_norm_stderr": 0.012595726268790124
},
"sciq": {
"acc": 0.768,
"acc_stderr": 0.013354937452281557,
"acc_norm": 0.671,
"acc_norm_stderr": 0.014865395385928354
},
"piqa": {
"acc": 0.6147986942328618,
"acc_stderr": 0.011354179751257075,
"acc_norm": 0.6186071817192601,
"acc_norm_stderr": 0.011332850406528672
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}