href_results / temperature=0.0 / OLMo-7B-SFT-hf.json
{
"path": "allenai/OLMo-7B-SFT",
"brainstorm": 0.012,
"open_qa": 0.804,
"closed_qa": 0.134,
"extract": 0.069,
"generation": 0.067,
"rewrite": 0.044,
"summarize": 0.015,
"classify": 0.224,
"reasoning_over_numerical_data": 0.047,
"multi-document_synthesis": 0.024,
"fact_checking_or_attributed_qa": 0.5,
"average": 0.1211,
"brainstorm_rank": 29,
"open_qa_rank": 5,
"closed_qa_rank": 27,
"extract_rank": 26,
"generation_rank": 25,
"rewrite_rank": 29,
"summarize_rank": 28,
"classify_rank": 20,
"reasoning_over_numerical_data_rank": 27,
"multi-document_synthesis_rank": 26,
"fact_checking_or_attributed_qa_rank": 8,
"average_rank": 29,
"brainstorm_confi": "+0.8 / -0.8",
"open_qa_confi": "+7.4 / -7.8",
"closed_qa_confi": "+4.5 / -4.0",
"extract_confi": "+3.2 / -2.7",
"generation_confi": "+2.0 / -2.0",
"rewrite_confi": "+1.6 / -1.5",
"summarize_confi": "+1.5 / -1.2",
"classify_confi": "+5.5 / -5.7",
"reasoning_over_numerical_data_confi": "+1.9 / -1.7",
"multi-document_synthesis_confi": "+1.5 / -1.3",
"fact_checking_or_attributed_qa_confi": "+4.5 / -4.5",
"average_confi": "+0.99 / -0.99"
}
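The file is a flat JSON object: one win-rate score, one leaderboard rank, and one confidence interval string per HREF category, plus the overall average. A minimal sketch of reading it is shown below; the local filename and the choice to sort categories by score are illustrative assumptions, not part of this upload.

```python
# Minimal sketch: load this HREF results file and list per-category
# scores alongside their ranks and confidence intervals.
# The filename below is an assumption about where the file is stored locally.
import json

with open("OLMo-7B-SFT-hf.json") as f:
    results = json.load(f)

# Category keys are everything except the model path, the average,
# and the derived "_rank" / "_confi" entries.
categories = [
    k for k in results
    if not k.endswith(("_rank", "_confi")) and k not in ("path", "average")
]

for cat in sorted(categories, key=lambda c: results[c], reverse=True):
    score = results[cat]
    rank = results.get(f"{cat}_rank")
    confi = results.get(f"{cat}_confi")
    print(f"{cat:40s} score={score:.3f} rank={rank} CI={confi}")

print(f"{'average':40s} score={results['average']:.4f} rank={results['average_rank']}")
```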