{
  "path": "mistralai/Mistral-Small-Instruct-2409",
  "brainstorm": 0.47,
  "open_qa": 0.676,
  "closed_qa": 0.322,
  "extract": 0.364,
  "generation": 0.424,
  "rewrite": 0.449,
  "summarize": 0.401,
  "classify": 0.393,
  "reasoning_over_numerical_data": 0.397,
  "multi-document_synthesis": 0.547,
  "fact_checking_or_attributed_qa": 0.258,
  "average": 0.4221,
  "brainstorm_rank": 1,
  "open_qa_rank": 5,
  "closed_qa_rank": 2,
  "extract_rank": 1,
  "generation_rank": 7,
  "rewrite_rank": 1,
  "summarize_rank": 1,
  "classify_rank": 9,
  "reasoning_over_numerical_data_rank": 5,
  "multi-document_synthesis_rank": 1,
  "fact_checking_or_attributed_qa_rank": 25,
  "average_rank": 3,
  "brainstorm_confi": "+3.8 / -4.0",
  "open_qa_confi": "+8.8 / -8.8",
  "closed_qa_confi": "+5.9 / -5.7",
  "extract_confi": "+5.9 / -5.7",
  "generation_confi": "+4.0 / -3.9",
  "rewrite_confi": "+3.6 / -3.7",
  "summarize_confi": "+6.7 / -6.4",
  "classify_confi": "+6.0 / -5.7",
  "reasoning_over_numerical_data_confi": "+3.9 / -3.7",
  "multi-document_synthesis_confi": "+4.6 / -4.6",
  "fact_checking_or_attributed_qa_confi": "+4.1 / -3.9",
  "average_confi": "+1.46 / -1.44"
}