lm1-misc-pile/1b191b91b/1b191b91bpile/evaluation/rankeval/lm1-1b1-91b-results_lm-eval_global_step173500_2023-01-23-19-59-49_1shots.json
{
"results": {
"anli_r1": {
"acc": 0.329,
"acc_stderr": 0.014865395385928367
},
"anli_r2": {
"acc": 0.318,
"acc_stderr": 0.014734079309311901
},
"anli_r3": {
"acc": 0.34,
"acc_stderr": 0.013680495725767797
},
"cb": {
"acc": 0.48214285714285715,
"acc_stderr": 0.06737697508644648,
"f1": 0.3421052631578947
},
"copa": {
"acc": 0.69,
"acc_stderr": 0.04648231987117316
},
"hellaswag": {
"acc": 0.3619796853216491,
"acc_stderr": 0.004795908282584554,
"acc_norm": 0.4466241784505079,
"acc_norm_stderr": 0.004961268387512966
},
"rte": {
"acc": 0.5270758122743683,
"acc_stderr": 0.0300523034631437
},
"winogrande": {
"acc": 0.5374901341752171,
"acc_stderr": 0.014012928183336574
},
"storycloze_2016": {
"acc": 0.6509887760555852,
"acc_stderr": 0.011022640519108541
},
"boolq": {
"acc": 0.4996941896024465,
"acc_stderr": 0.008745053340723166
},
"arc_easy": {
"acc": 0.5597643097643098,
"acc_stderr": 0.010186228624515655,
"acc_norm": 0.5446127946127947,
"acc_norm_stderr": 0.010218861787618721
},
"arc_challenge": {
"acc": 0.24744027303754265,
"acc_stderr": 0.01261035266329267,
"acc_norm": 0.27986348122866894,
"acc_norm_stderr": 0.013119040897725923
},
"sciq": {
"acc": 0.893,
"acc_stderr": 0.009779910359847167,
"acc_norm": 0.89,
"acc_norm_stderr": 0.009899393819724439
},
"piqa": {
"acc": 0.6887921653971708,
"acc_stderr": 0.01080226387804584,
"acc_norm": 0.6860718171926007,
"acc_norm_stderr": 0.010827928134189646
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}