href_results/temperature=0.0/Mistral-Large-Instruct-2407.json
{
"path": "mistralai/Mistral-Large-Instruct-2407",
"brainstorm": 0.545,
"open_qa": 0.588,
"closed_qa": 0.351,
"extract": 0.396,
"generation": 0.514,
"rewrite": 0.505,
"summarize": 0.453,
"classify": 0.445,
"reasoning_over_numerical_data": 0.485,
"multi-document_synthesis": 0.591,
"fact_checking_or_attributed_qa": 0.26,
"average": 0.4762,
"brainstorm_rank": 1,
"open_qa_rank": 21,
"closed_qa_rank": 2,
"extract_rank": 1,
"generation_rank": 1,
"rewrite_rank": 1,
"summarize_rank": 1,
"classify_rank": 1,
"reasoning_over_numerical_data_rank": 1,
"multi-document_synthesis_rank": 1,
"fact_checking_or_attributed_qa_rank": 25,
"average_rank": 1,
"brainstorm_confi": "+4.0 / -4.1",
"open_qa_confi": "+8.8 / -8.8",
"closed_qa_confi": "+5.7 / -5.7",
"extract_confi": "+5.4 / -5.7",
"generation_confi": "+3.9 / -3.9",
"rewrite_confi": "+3.6 / -3.7",
"summarize_confi": "+6.7 / -6.7",
"classify_confi": "+6.0 / -6.2",
"reasoning_over_numerical_data_confi": "+3.7 / -3.7",
"multi-document_synthesis_confi": "+4.5 / -4.5",
"fact_checking_or_attributed_qa_confi": "+4.1 / -3.9",
"average_confi": "+1.42 / -1.41"
}
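Each record holds the model's per-category scores, per-category ranks, an overall average, and plus/minus confidence bounds for each score. Below is a minimal sketch (standard library only, assuming the repo-relative path shown above) of how such a file could be loaded and the category scores aggregated; note that the stored "average" may be weighted by prompt count per category, so a recomputed unweighted mean will not necessarily match it.

```python
import json

# Load the results file for one model (path assumed from this repo's layout).
with open("href_results/temperature=0.0/Mistral-Large-Instruct-2407.json") as f:
    results = json.load(f)

# Category scores are the top-level float fields; ranks are ints and the
# confidence bounds are strings, so filtering on float isolates the scores.
# "average" is excluded because it is a precomputed aggregate, not a category.
scores = {
    key: value
    for key, value in results.items()
    if isinstance(value, float) and key != "average"
}

unweighted_mean = sum(scores.values()) / len(scores)
print(f"reported average: {results['average']}")
print(f"unweighted mean:  {unweighted_mean:.4f}")
```

A gap between the two numbers is not by itself an error: if the benchmark weights categories by their number of prompts (an assumption here, not stated in the file), the stored average will differ from the plain mean of the eleven category scores.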