{
  "results": {
    "truthfulqa_mc": {
      "mc1": 0.23745410036719705,
      "mc1_stderr": 0.014896277441041836,
      "mc2": 0.4402813457518687,
      "mc2_stderr": 0.015339681556915718
    }
  },
  "versions": {
    "truthfulqa_mc": 1
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=BEE-spoke-data/smol_llama-220M-GQA,revision=main,trust_remote_code=True,dtype='bfloat16'",
    "num_fewshot": 0,
    "batch_size": "8",
    "batch_sizes": [],
    "device": "cuda",
    "no_cache": false,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
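
The "config" block mirrors the keyword arguments of lm_eval.evaluator.simple_evaluate in EleutherAI's lm-evaluation-harness (the pre-0.4 API, which is what the "hf-causal-experimental" model type belongs to). A minimal reproduction sketch under that assumption follows; the output file name is illustrative, not part of the original run:

# Sketch: re-run the evaluation recorded above, assuming the pre-0.4
# lm-evaluation-harness API (lm_eval.evaluator.simple_evaluate).
import json

from lm_eval import evaluator

results = evaluator.simple_evaluate(
    model="hf-causal-experimental",
    model_args=(
        "pretrained=BEE-spoke-data/smol_llama-220M-GQA,"
        "revision=main,trust_remote_code=True,dtype='bfloat16'"
    ),
    tasks=["truthfulqa_mc"],
    num_fewshot=0,          # zero-shot, as in the config block
    batch_size=8,
    device="cuda",
    no_cache=False,
    limit=None,             # evaluate the full task, no subsampling
    bootstrap_iters=100000, # iterations for the stderr bootstrap
)

# The returned dict has the same "results"/"versions"/"config" shape
# as this file; the path below is hypothetical.
with open("truthfulqa_mc_results.json", "w") as f:
    json.dump(results, f, indent=2)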