{
"path": "meta-llama/Llama-2-70b-chat-hf",
"brainstorm": 0.218,
"open_qa": 0.662,
"closed_qa": 0.312,
"extract": 0.173,
"generation": 0.197,
"rewrite": 0.183,
"summarize": 0.153,
"classify": 0.343,
"reasoning_over_numerical_data": 0.172,
"multi-document_synthesis": 0.166,
"fact_checking_or_attributed_qa": 0.488,
"average": 0.2431,
"brainstorm_rank": 13,
"open_qa_rank": 1,
"closed_qa_rank": 1,
"extract_rank": 5,
"generation_rank": 9,
"rewrite_rank": 9,
"summarize_rank": 10,
"classify_rank": 5,
"reasoning_over_numerical_data_rank": 10,
"multi-document_synthesis_rank": 12,
"fact_checking_or_attributed_qa_rank": 1,
"average_rank": 10,
"brainstorm_confi": "+3.2 / -3.3",
"open_qa_confi": "+8.3 / -9.3",
"closed_qa_confi": "+5.7 / -5.7",
"extract_confi": "+5.0 / -4.5",
"generation_confi": "+3.3 / -3.1",
"rewrite_confi": "+2.9 / -2.9",
"summarize_confi": "+5.2 / -4.7",
"classify_confi": "+6.0 / -6.0",
"reasoning_over_numerical_data_confi": "+3.1 / -3.0",
"multi-document_synthesis_confi": "+3.5 / -3.3",
"fact_checking_or_attributed_qa_confi": "+4.7 / -4.7",
"average_confi": "+1.28 / -1.26"
}