{
"path": "allenai/OLMo-7B-SFT",
"brainstorm": 0.148,
"open_qa": 0.529,
"closed_qa": 0.161,
"extract": 0.104,
"generation": 0.148,
"rewrite": 0.146,
"summarize": 0.084,
"classify": 0.206,
"reasoning_over_numerical_data": 0.099,
"multi-document_synthesis": 0.135,
"fact_checking_or_attributed_qa": 0.437,
"average": 0.179,
"brainstorm_rank": 17,
"open_qa_rank": 19,
"closed_qa_rank": 14,
"extract_rank": 12,
"generation_rank": 13,
"rewrite_rank": 13,
"summarize_rank": 10,
"classify_rank": 13,
"reasoning_over_numerical_data_rank": 13,
"multi-document_synthesis_rank": 12,
"fact_checking_or_attributed_qa_rank": 1,
"average_rank": 13,
"brainstorm_confi": "+2.8 / -2.8",
"open_qa_confi": "+9.8 / -8.8",
"closed_qa_confi": "+4.7 / -4.7",
"extract_confi": "+4.0 / -3.7",
"generation_confi": "+3.0 / -2.8",
"rewrite_confi": "+2.6 / -2.5",
"summarize_confi": "+4.0 / -3.5",
"classify_confi": "+5.5 / -5.0",
"reasoning_over_numerical_data_confi": "+2.4 / -2.4",
"multi-document_synthesis_confi": "+3.1 / -3.1",
"fact_checking_or_attributed_qa_confi": "+4.5 / -4.3",
"average_confi": "+1.12 / -1.14"
}