{
    "path": "allenai/Llama-3.1-Tulu-3-8B-SFT",
    "brainstorm": 0.015,
    "open_qa": 0.877,
    "closed_qa": 0.319,
    "extract": 0.176,
    "generation": 0.068,
    "rewrite": 0.074,
    "summarize": 0.007,
    "classify": 0.313,
    "reasoning_over_numerical_data": 0.187,
    "multi-document_synthesis": 0.04,
    "fact_checking_or_attributed_qa": 0.539,
    "average": 0.1697,
    "brainstorm_rank": 29,
    "open_qa_rank": 1,
    "closed_qa_rank": 2,
    "extract_rank": 14,
    "generation_rank": 25,
    "rewrite_rank": 26,
    "summarize_rank": 28,
    "classify_rank": 9,
    "reasoning_over_numerical_data_rank": 16,
    "multi-document_synthesis_rank": 26,
    "fact_checking_or_attributed_qa_rank": 1,
    "average_rank": 25,
    "brainstorm_confi": "+1.0 / -0.8",
    "open_qa_confi": "+6.4 / -6.9",
    "closed_qa_confi": "+6.2 / -5.7",
    "extract_confi": "+4.7 / -4.5",
    "generation_confi": "+2.2 / -2.0",
    "rewrite_confi": "+2.0 / -1.9",
    "summarize_confi": "+1.2 / -0.7",
    "classify_confi": "+6.2 / -6.5",
    "reasoning_over_numerical_data_confi": "+3.2 / -3.1",
    "multi-document_synthesis_confi": "+1.8 / -1.8",
    "fact_checking_or_attributed_qa_confi": "+4.5 / -4.5",
    "average_confi": "+1.10 / -1.12"
}