eval 0 and 1
README.md
@@ -43,6 +43,138 @@ The objective is to streamline the cognitive or reasoning core, eliminating any
```bash
litgpt evaluate --tasks 'leaderboard' --out_dir 'evaluate-0/' --batch_size 4 --dtype 'bfloat16' out/contrain/final/
```

| Tasks |Version|Filter|n-shot| Metric | |Value | |Stderr|
|-----------------------------------------------------------|-------|------|-----:|-----------------------|---|-----:|---|------|
|leaderboard | N/A| | | | | | | |
| - leaderboard_bbh | N/A| | | | | | | |
| - leaderboard_bbh_boolean_expressions | 1|none | 3|acc_norm |↑ |0.4600|± |0.0316|
| - leaderboard_bbh_causal_judgement | 1|none | 3|acc_norm |↑ |0.5187|± |0.0366|
| - leaderboard_bbh_date_understanding | 1|none | 3|acc_norm |↑ |0.1560|± |0.0230|
| - leaderboard_bbh_disambiguation_qa | 1|none | 3|acc_norm |↑ |0.3000|± |0.0290|
| - leaderboard_bbh_formal_fallacies | 1|none | 3|acc_norm |↑ |0.4680|± |0.0316|
| - leaderboard_bbh_geometric_shapes | 1|none | 3|acc_norm |↑ |0.0920|± |0.0183|
| - leaderboard_bbh_hyperbaton | 1|none | 3|acc_norm |↑ |0.5320|± |0.0316|
| - leaderboard_bbh_logical_deduction_five_objects | 1|none | 3|acc_norm |↑ |0.2240|± |0.0264|
| - leaderboard_bbh_logical_deduction_seven_objects | 1|none | 3|acc_norm |↑ |0.1600|± |0.0232|
| - leaderboard_bbh_logical_deduction_three_objects | 1|none | 3|acc_norm |↑ |0.3360|± |0.0299|
| - leaderboard_bbh_movie_recommendation | 1|none | 3|acc_norm |↑ |0.0640|± |0.0155|
| - leaderboard_bbh_navigate | 1|none | 3|acc_norm |↑ |0.4200|± |0.0313|
| - leaderboard_bbh_object_counting | 1|none | 3|acc_norm |↑ |0.0640|± |0.0155|
| - leaderboard_bbh_penguins_in_a_table | 1|none | 3|acc_norm |↑ |0.2397|± |0.0355|
| - leaderboard_bbh_reasoning_about_colored_objects | 1|none | 3|acc_norm |↑ |0.1520|± |0.0228|
| - leaderboard_bbh_ruin_names | 1|none | 3|acc_norm |↑ |0.2720|± |0.0282|
| - leaderboard_bbh_salient_translation_error_detection | 1|none | 3|acc_norm |↑ |0.1240|± |0.0209|
| - leaderboard_bbh_snarks | 1|none | 3|acc_norm |↑ |0.5618|± |0.0373|
| - leaderboard_bbh_sports_understanding | 1|none | 3|acc_norm |↑ |0.4600|± |0.0316|
| - leaderboard_bbh_temporal_sequences | 1|none | 3|acc_norm |↑ |0.2920|± |0.0288|
| - leaderboard_bbh_tracking_shuffled_objects_five_objects | 1|none | 3|acc_norm |↑ |0.2040|± |0.0255|
| - leaderboard_bbh_tracking_shuffled_objects_seven_objects| 1|none | 3|acc_norm |↑ |0.1200|± |0.0206|
| - leaderboard_bbh_tracking_shuffled_objects_three_objects| 1|none | 3|acc_norm |↑ |0.3160|± |0.0295|
| - leaderboard_bbh_web_of_lies | 1|none | 3|acc_norm |↑ |0.4880|± |0.0317|
| - leaderboard_gpqa | N/A| | | | | | | |
| - leaderboard_gpqa_diamond | 1|none | 0|acc_norm |↑ |0.1919|± |0.0281|
| - leaderboard_gpqa_extended | 1|none | 0|acc_norm |↑ |0.2289|± |0.0180|
| - leaderboard_gpqa_main | 1|none | 0|acc_norm |↑ |0.2768|± |0.0212|
| - leaderboard_ifeval | 3|none | 0|inst_level_loose_acc |↑ |0.2098|± | N/A|
| | |none | 0|inst_level_strict_acc |↑ |0.1966|± | N/A|
| | |none | 0|prompt_level_loose_acc |↑ |0.1109|± |0.0135|
| | |none | 0|prompt_level_strict_acc|↑ |0.1072|± |0.0133|
| - leaderboard_math_hard | N/A| | | | | | | |
| - leaderboard_math_algebra_hard | 1|none | 4|exact_match |↑ |0.0000|± | 0|
| - leaderboard_math_counting_and_prob_hard | 1|none | 4|exact_match |↑ |0.0000|± | 0|
| - leaderboard_math_geometry_hard | 1|none | 4|exact_match |↑ |0.0000|± | 0|
| - leaderboard_math_intermediate_algebra_hard | 1|none | 4|exact_match |↑ |0.0000|± | 0|
| - leaderboard_math_num_theory_hard | 1|none | 4|exact_match |↑ |0.0000|± | 0|
| - leaderboard_math_prealgebra_hard | 1|none | 4|exact_match |↑ |0.0000|± | 0|
| - leaderboard_math_precalculus_hard | 1|none | 4|exact_match |↑ |0.0000|± | 0|
| - leaderboard_mmlu_pro | 0.1|none | 5|acc |↑ |0.1184|± |0.0029|
| - leaderboard_musr | N/A| | | | | | | |
| - leaderboard_musr_murder_mysteries | 1|none | 0|acc_norm |↑ |0.5000|± |0.0317|
| - leaderboard_musr_object_placements | 1|none | 0|acc_norm |↑ |0.2578|± |0.0274|
| - leaderboard_musr_team_allocation | 1|none | 0|acc_norm |↑ |0.2600|± |0.0278|

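Each run also leaves its raw scores under the chosen `--out_dir`. A minimal sketch for pulling the headline numbers back out, assuming the harness writes a `results.json` there with lm-eval-style metric keys such as `acc,none` / `acc_norm,none` (both the filename and the key layout are assumptions and may differ between versions):

```bash
# Sketch only: list task names and their accuracy from an assumed results.json.
# The path and the "acc_norm,none"/"acc,none" key names are assumptions about the
# lm-eval output layout, not something this README guarantees.
jq -r '.results | to_entries[] | [.key, (.value["acc_norm,none"] // .value["acc,none"] // "n/a")] | @tsv' evaluate-0/results.json
```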

```bash
litgpt evaluate --tasks 'hellaswag,gsm8k,truthfulqa_mc2,mmlu,winogrande,arc_challenge' --out_dir 'evaluate-1/' --batch_size 4 --dtype 'bfloat16' out/contrain/final/
```

| Tasks |Version| Filter |n-shot| Metric | |Value | |Stderr|
|---------------------------------------|------:|----------------|-----:|-----------|---|-----:|---|-----:|
|arc_challenge | 1|none | 0|acc |↑ |0.1962|± |0.0116|
| | |none | 0|acc_norm |↑ |0.2483|± |0.0126|
|gsm8k | 3|flexible-extract| 5|exact_match|↑ |0.0008|± |0.0008|
| | |strict-match | 5|exact_match|↑ |0.0000|± |0.0000|
|hellaswag | 1|none | 0|acc |↑ |0.2594|± |0.0044|
| | |none | 0|acc_norm |↑ |0.2564|± |0.0044|
|mmlu | 2|none | |acc |↑ |0.2575|± |0.0037|
| - humanities | 2|none | |acc |↑ |0.2410|± |0.0062|
| - formal_logic | 1|none | 0|acc |↑ |0.3016|± |0.0410|
| - high_school_european_history | 1|none | 0|acc |↑ |0.1879|± |0.0305|
| - high_school_us_history | 1|none | 0|acc |↑ |0.2794|± |0.0315|
| - high_school_world_history | 1|none | 0|acc |↑ |0.2363|± |0.0277|
| - international_law | 1|none | 0|acc |↑ |0.1240|± |0.0301|
| - jurisprudence | 1|none | 0|acc |↑ |0.2315|± |0.0408|
| - logical_fallacies | 1|none | 0|acc |↑ |0.2270|± |0.0329|
| - moral_disputes | 1|none | 0|acc |↑ |0.2139|± |0.0221|
| - moral_scenarios | 1|none | 0|acc |↑ |0.2715|± |0.0149|
| - philosophy | 1|none | 0|acc |↑ |0.2572|± |0.0248|
| - prehistory | 1|none | 0|acc |↑ |0.2500|± |0.0241|
| - professional_law | 1|none | 0|acc |↑ |0.2334|± |0.0108|
| - world_religions | 1|none | 0|acc |↑ |0.2281|± |0.0322|
| - other | 2|none | |acc |↑ |0.2379|± |0.0076|
| - business_ethics | 1|none | 0|acc |↑ |0.2800|± |0.0451|
| - clinical_knowledge | 1|none | 0|acc |↑ |0.2943|± |0.0280|
| - college_medicine | 1|none | 0|acc |↑ |0.2832|± |0.0344|
| - global_facts | 1|none | 0|acc |↑ |0.1900|± |0.0394|
| - human_aging | 1|none | 0|acc |↑ |0.1794|± |0.0257|
| - management | 1|none | 0|acc |↑ |0.3398|± |0.0469|
| - marketing | 1|none | 0|acc |↑ |0.1838|± |0.0254|
| - medical_genetics | 1|none | 0|acc |↑ |0.1900|± |0.0394|
| - miscellaneous | 1|none | 0|acc |↑ |0.2171|± |0.0147|
| - nutrition | 1|none | 0|acc |↑ |0.2549|± |0.0250|
| - professional_accounting | 1|none | 0|acc |↑ |0.2447|± |0.0256|
| - professional_medicine | 1|none | 0|acc |↑ |0.3162|± |0.0282|
| - virology | 1|none | 0|acc |↑ |0.1506|± |0.0278|
| - social sciences | 2|none | |acc |↑ |0.2899|± |0.0082|
| - econometrics | 1|none | 0|acc |↑ |0.2807|± |0.0423|
| - high_school_geography | 1|none | 0|acc |↑ |0.3030|± |0.0327|
| - high_school_government_and_politics| 1|none | 0|acc |↑ |0.3109|± |0.0334|
| - high_school_macroeconomics | 1|none | 0|acc |↑ |0.3103|± |0.0235|
| - high_school_microeconomics | 1|none | 0|acc |↑ |0.2563|± |0.0284|
| - high_school_psychology | 1|none | 0|acc |↑ |0.3193|± |0.0200|
| - human_sexuality | 1|none | 0|acc |↑ |0.2824|± |0.0395|
| - professional_psychology | 1|none | 0|acc |↑ |0.2451|± |0.0174|
| - public_relations | 1|none | 0|acc |↑ |0.2818|± |0.0431|
| - security_studies | 1|none | 0|acc |↑ |0.3592|± |0.0307|
| - sociology | 1|none | 0|acc |↑ |0.2488|± |0.0306|
| - us_foreign_policy | 1|none | 0|acc |↑ |0.2800|± |0.0451|
| - stem | 2|none | |acc |↑ |0.2699|± |0.0079|
| - abstract_algebra | 1|none | 0|acc |↑ |0.1900|± |0.0394|
| - anatomy | 1|none | 0|acc |↑ |0.2444|± |0.0371|
| - astronomy | 1|none | 0|acc |↑ |0.2961|± |0.0372|
| - college_biology | 1|none | 0|acc |↑ |0.2431|± |0.0359|
| - college_chemistry | 1|none | 0|acc |↑ |0.3500|± |0.0479|
| - college_computer_science | 1|none | 0|acc |↑ |0.2700|± |0.0446|
| - college_mathematics | 1|none | 0|acc |↑ |0.2400|± |0.0429|
| - college_physics | 1|none | 0|acc |↑ |0.3627|± |0.0478|
| - computer_security | 1|none | 0|acc |↑ |0.1900|± |0.0394|
| - conceptual_physics | 1|none | 0|acc |↑ |0.2766|± |0.0292|
| - electrical_engineering | 1|none | 0|acc |↑ |0.2621|± |0.0366|
| - elementary_mathematics | 1|none | 0|acc |↑ |0.2725|± |0.0229|
| - high_school_biology | 1|none | 0|acc |↑ |0.3065|± |0.0262|
| - high_school_chemistry | 1|none | 0|acc |↑ |0.2562|± |0.0307|
| - high_school_computer_science | 1|none | 0|acc |↑ |0.2200|± |0.0416|
| - high_school_mathematics | 1|none | 0|acc |↑ |0.2630|± |0.0268|
| - high_school_physics | 1|none | 0|acc |↑ |0.2649|± |0.0360|
| - high_school_statistics | 1|none | 0|acc |↑ |0.3380|± |0.0323|
| - machine_learning | 1|none | 0|acc |↑ |0.1607|± |0.0349|
|truthfulqa_mc2 | 2|none | 0|acc |↑ |0.4992|± |0.0163|
|winogrande | 1|none | 0|acc |↑ |0.5075|± |0.0141|

| Groups |Version|Filter|n-shot|Metric| |Value | |Stderr|
|------------------|------:|------|------|------|---|-----:|---|-----:|
|mmlu | 2|none | |acc |↑ |0.2575|± |0.0037|
| - humanities | 2|none | |acc |↑ |0.2410|± |0.0062|
| - other | 2|none | |acc |↑ |0.2379|± |0.0076|
| - social sciences| 2|none | |acc |↑ |0.2899|± |0.0082|
| - stem | 2|none | |acc |↑ |0.2699|± |0.0079|
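
The two passes above were launched by hand. If they need to be repeated (for example after a new checkpoint lands in `out/contrain/final/`), a small wrapper keeps each task list paired with its output directory. This is only a convenience sketch around the exact commands shown above:

```bash
#!/usr/bin/env bash
# Sketch: re-run both evaluation passes against the same checkpoint.
# Flags mirror the two commands above; nothing here is required by litgpt itself.
set -euo pipefail

ckpt=out/contrain/final/

run_eval() {
  local out_dir=$1 tasks=$2
  litgpt evaluate \
    --tasks "$tasks" \
    --out_dir "$out_dir" \
    --batch_size 4 \
    --dtype 'bfloat16' \
    "$ckpt"
}

run_eval 'evaluate-0/' 'leaderboard'
run_eval 'evaluate-1/' 'hellaswag,gsm8k,truthfulqa_mc2,mmlu,winogrande,arc_challenge'
```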