href_results / temperature=1.0 /Llama-2-13b-chat-hf.json
alrope's picture
Upload folder using huggingface_hub
f5869f0 verified
raw
history blame
1.27 kB
{
"path": "meta-llama/Llama-2-13b-chat-hf",
"brainstorm": 0.21,
"open_qa": 0.593,
"closed_qa": 0.176,
"extract": 0.134,
"generation": 0.167,
"rewrite": 0.136,
"summarize": 0.188,
"classify": 0.323,
"reasoning_over_numerical_data": 0.122,
"multi-document_synthesis": 0.121,
"fact_checking_or_attributed_qa": 0.409,
"average": 0.2013,
"brainstorm_rank": 13,
"open_qa_rank": 18,
"closed_qa_rank": 14,
"extract_rank": 12,
"generation_rank": 13,
"rewrite_rank": 13,
"summarize_rank": 5,
"classify_rank": 5,
"reasoning_over_numerical_data_rank": 13,
"multi-document_synthesis_rank": 14,
"fact_checking_or_attributed_qa_rank": 9,
"average_rank": 13,
"brainstorm_confi": "+3.26 / -3.17",
"open_qa_confi": "+9.31 / -9.31",
"closed_qa_confi": "+4.70 / -4.46",
"extract_confi": "+4.46 / -3.96",
"generation_confi": "+3.00 / -3.00",
"rewrite_confi": "+2.57 / -2.43",
"summarize_confi": "+5.20 / -5.20",
"classify_confi": "+5.97 / -5.72",
"reasoning_over_numerical_data_confi": "+2.71 / -2.71",
"multi-document_synthesis_confi": "+3.09 / -2.87",
"fact_checking_or_attributed_qa_confi": "+4.55 / -4.55",
"average_confi": "+1.20 / -1.20"
}