{
  "results": {
    "truthfulqa_mc": {
      "mc1": 0.23745410036719705,
      "mc1_stderr": 0.014896277441041836,
      "mc2": 0.4402813457518687,
      "mc2_stderr": 0.015339681556915718
    }
  },
  "versions": {
    "truthfulqa_mc": 1
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=BEE-spoke-data/smol_llama-220M-GQA,revision=main,trust_remote_code=True,dtype='bfloat16'",
    "num_fewshot": 0,
    "batch_size": "8",
    "batch_sizes": [],
    "device": "cuda",
    "no_cache": false,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
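
The "model_args" string in the config maps onto standard Hugging Face from_pretrained keyword arguments. The snippet below is a minimal sketch, not part of the original results file, showing how the evaluated checkpoint could be loaded with the same settings (revision, trust_remote_code, dtype) and placed on the device named in the config; the variable names are illustrative assumptions, not taken from the evaluation harness.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Checkpoint and settings mirrored from "model_args" and "device" above.
model_id = "BEE-spoke-data/smol_llama-220M-GQA"

tokenizer = AutoTokenizer.from_pretrained(
    model_id,
    revision="main",
    trust_remote_code=True,
)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    revision="main",
    trust_remote_code=True,
    torch_dtype=torch.bfloat16,  # corresponds to dtype='bfloat16'
).to("cuda")                     # corresponds to "device": "cuda"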