{
  "path": "WizardLMTeam/WizardLM-13B-V1.2",
  "brainstorm": 0.189,
  "open_qa": 0.5,
  "closed_qa": 0.119,
  "extract": 0.084,
  "generation": 0.165,
  "rewrite": 0.137,
  "summarize": 0.072,
  "classify": 0.216,
  "reasoning_over_numerical_data": 0.078,
  "multi-document_synthesis": 0.104,
  "fact_checking_or_attributed_qa": 0.307,
  "average": 0.1618,
  "brainstorm_rank": 13,
  "open_qa_rank": 19,
  "closed_qa_rank": 14,
  "extract_rank": 12,
  "generation_rank": 13,
  "rewrite_rank": 13,
  "summarize_rank": 19,
  "classify_rank": 13,
  "reasoning_over_numerical_data_rank": 13,
  "multi-document_synthesis_rank": 15,
  "fact_checking_or_attributed_qa_rank": 15,
  "average_rank": 15,
  "brainstorm_confi": "+3.3 / -3.1",
  "open_qa_confi": "+9.8 / -9.8",
  "closed_qa_confi": "+4.5 / -4.0",
  "extract_confi": "+3.7 / -3.2",
  "generation_confi": "+3.2 / -3.0",
  "rewrite_confi": "+2.5 / -2.4",
  "summarize_confi": "+3.7 / -3.2",
  "classify_confi": "+5.5 / -5.2",
  "reasoning_over_numerical_data_confi": "+2.1 / -2.1",
  "multi-document_synthesis_confi": "+2.9 / -2.9",
  "fact_checking_or_attributed_qa_confi": "+4.1 / -4.1",
  "average_confi": "+1.08 / -1.06"
}