{
  "path": "WizardLMTeam/WizardLM-13B-V1.2",
  "brainstorm": 0.169,
  "open_qa": 0.632,
  "closed_qa": 0.228,
  "extract": 0.171,
  "generation": 0.158,
  "rewrite": 0.147,
  "summarize": 0.077,
  "classify": 0.336,
  "reasoning_over_numerical_data": 0.097,
  "multi-document_synthesis": 0.113,
  "fact_checking_or_attributed_qa": 0.439,
  "average": 0.1956,
  "brainstorm_rank": 19,
  "open_qa_rank": 21,
  "closed_qa_rank": 14,
  "extract_rank": 14,
  "generation_rank": 19,
  "rewrite_rank": 19,
  "summarize_rank": 21,
  "classify_rank": 9,
  "reasoning_over_numerical_data_rank": 21,
  "multi-document_synthesis_rank": 20,
  "fact_checking_or_attributed_qa_rank": 8,
  "average_rank": 20,
  "brainstorm_confi": "+3.0 / -2.8",
  "open_qa_confi": "+9.3 / -9.8",
  "closed_qa_confi": "+5.4 / -5.0",
  "extract_confi": "+5.0 / -4.5",
  "generation_confi": "+3.0 / -2.8",
  "rewrite_confi": "+2.6 / -2.6",
  "summarize_confi": "+3.5 / -3.2",
  "classify_confi": "+6.2 / -6.2",
  "reasoning_over_numerical_data_confi": "+2.4 / -2.3",
  "multi-document_synthesis_confi": "+3.1 / -2.9",
  "fact_checking_or_attributed_qa_confi": "+4.5 / -4.5",
  "average_confi": "+1.17 / -1.17"
}