href_results / temperature=1.0 / OLMo-7B-0724-Instruct-hf.json
{
"path": "allenai/OLMo-7B-SFT",
"brainstorm": 0.065,
"open_qa": 0.176,
"closed_qa": 0.04,
"extract": 0.037,
"generation": 0.04,
"rewrite": 0.033,
"summarize": 0.05,
"classify": 0.08,
"reasoning_over_numerical_data": 0.05,
"multi-document_synthesis": 0.073,
"fact_checking_or_attributed_qa": 0.173,
"average": 0.0669,
"brainstorm_rank": 20,
"open_qa_rank": 26,
"closed_qa_rank": 21,
"extract_rank": 21,
"generation_rank": 21,
"rewrite_rank": 21,
"summarize_rank": 19,
"classify_rank": 21,
"reasoning_over_numerical_data_rank": 19,
"multi-document_synthesis_rank": 19,
"fact_checking_or_attributed_qa_rank": 22,
"average_rank": 22,
"brainstorm_confi": "+2.00 / -2.00",
"open_qa_confi": "+7.84 / -6.86",
"closed_qa_confi": "+2.72 / -2.23",
"extract_confi": "+2.72 / -2.23",
"generation_confi": "+1.67 / -1.50",
"rewrite_confi": "+1.43 / -1.29",
"summarize_confi": "+2.97 / -2.97",
"classify_confi": "+3.73 / -3.23",
"reasoning_over_numerical_data_confi": "+1.87 / -1.78",
"multi-document_synthesis_confi": "+2.43 / -2.21",
"fact_checking_or_attributed_qa_confi": "+3.46 / -3.46",
"average_confi": "+0.7 / - 0.7"
}