Schema (16 columns; string stats show min–max lengths for free-text fields and distinct-value counts for categorical fields):

| Column | Type | Stats |
| --- | --- | --- |
| `run_id` | large_string | length 64–64 |
| `timestamp_utc` | int64 | 1,736B–1,738B (Unix epoch, ms) |
| `timestamp_day_hour_utc` | int64 | 1,736B–1,738B (Unix epoch ms, floored to the hour) |
| `model_name_or_path` | large_string (categorical) | 5 values |
| `unitxt_card` | large_string (categorical) | 76 values |
| `unitxt_recipe` | large_string | length 330–400 |
| `quantization_type` | large_string (categorical) | 1 value |
| `quantization_bit_count` | large_string (categorical) | 1 value |
| `inference_runtime_s` | float64 | 1.1–745 |
| `generation_args` | large_string (categorical) | 1 value |
| `model_args` | large_string (categorical) | 5 values |
| `inference_engine` | large_string (categorical) | 1 value |
| `packages_versions` | large_string (categorical) | 1 value |
| `scores` | large_string | length 174–242 |
| `num_gpu` | int64 | 1–1 |
| `device` | large_string (categorical) | 1 value |
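Note that the environment columns (`generation_args`, `model_args`, `packages_versions`) hold JSON strings, while `scores` is a single-quoted Python-dict repr, so it parses with `ast.literal_eval` rather than `json.loads`. A minimal loading sketch, assuming the preview comes from a Hugging Face dataset (the repository path below is a placeholder):

```python
import ast
import json

from datasets import load_dataset

# Placeholder path: substitute the actual dataset repository.
ds = load_dataset("org/eval-runs", split="train")

row = ds[0]
model_args = json.loads(row["model_args"])  # double-quoted -> valid JSON
scores = ast.literal_eval(row["scores"])    # single-quoted Python repr
print(scores["accuracy"], scores["num_of_instances"])
```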
All 21 preview rows below share the following constant fields:

- `model_name_or_path`: `mistralai_Mistral-7B-Instruct-v0.3`
- `timestamp_day_hour_utc`: 1,738,317,600,000
- `quantization_type`: `None` · `quantization_bit_count`: `half`
- `inference_engine`: `VLLM` · `num_gpu`: 1 · `device`: `a100_80gb`
- `generation_args`: `{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}`
- `model_args`: `{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}`
- `packages_versions`: `{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}`
- `scores`: every row reports `accuracy` as its primary metric over `num_of_instances: 100`, with `score`/`score_ci_*` duplicating `accuracy`/`accuracy_ci_*`; only the accuracy and its CI bounds are shown per row below.

**Row 1 — `cards.mmlu_pro.computer_science` (5-shot)**
- run_id `59b661b3a7c2358e5a038dd664fc595e1246d1f2283860c65c77d867f63750c5` · timestamp_utc 1,738,317,774,210 · runtime 12.828638 s
- accuracy 0.35 (CI 0.26–0.44277837028301065)
- recipe: `card=cards.mmlu_pro.computer_science,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesInstructionsStateBelowPlease.enumerator_capitals_choicesSeparator_newline_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`
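Each `unitxt_recipe` is a flat `key=value,key=value,...` string whose values contain no commas, so the per-run settings (card, `num_demos`, template, and so on) can be recovered with a plain split. A sketch, reusing `row` from the loading example above:

```python
def parse_recipe(recipe: str) -> dict[str, str]:
    # "key=value,key=value,..." with comma-free values.
    return dict(part.split("=", 1) for part in recipe.split(","))

settings = parse_recipe(row["unitxt_recipe"])
print(settings["card"], settings["num_demos"], settings["template"])
```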
**Row 2 — `cards.mmlu_pro.math` (5-shot)**
- run_id `c08108ac81bf54a13e269825f4a81aff624ee49ae1896a695ca7be76a5fc213d` · timestamp_utc 1,738,317,609,260 · runtime 11.875094 s
- accuracy 0.29 (CI 0.21–0.39)
- recipe: `card=cards.mmlu_pro.math,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesStructuredWithTopic.enumerator_roman_choicesSeparator_newline_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`

**Row 3 — `cards.mmlu.public_relations` (5-shot)**
- run_id `1ddf955aed99f8f73886a4ed326487864f18a7af9f96867e47d867e691ba18a8` · timestamp_utc 1,738,317,617,581 · runtime 7.139793 s
- accuracy 0.6 (CI 0.5–0.691124845923058)
- recipe: `card=cards.mmlu.public_relations,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithTopic.enumerator_roman_choicesSeparator_space_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`
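Since `model_args` and `generation_args` mirror the keyword arguments of vLLM's `LLM` and `SamplingParams`, a row can in principle be replayed. A sketch under that assumption, pinned loosely to the vllm 0.6.x recorded in `packages_versions` (keyword support shifts between vLLM versions, and null-valued keys must be dropped before the pass-through):

```python
import json

from vllm import LLM, SamplingParams

model_args = json.loads(row["model_args"])
gen_args = {k: v for k, v in json.loads(row["generation_args"]).items()
            if v is not None}  # drop "top_p": null, "temperature": null

llm = LLM(**model_args)              # model, seed, gpu_memory_utilization, ...
params = SamplingParams(**gen_args)  # n=1, max_tokens=64, logprobs=5, ...
outputs = llm.generate(["An example prompt"], params)
print(outputs[0].outputs[0].text)
```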
**Row 4 — `cards.mmlu.security_studies` (5-shot)**
- run_id `6b52f7fddd2087ef139d5b7b35b239b051b44ebb7abad853c9a1d9ee75ba1801` · timestamp_utc 1,738,317,633,436 · runtime 15.014698 s
- accuracy 0.81 (CI 0.73–0.89)
- recipe: `card=cards.mmlu.security_studies,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicFixed.enumerator_keyboard_choicesSeparator_comma_shuffleChoices_placeCorrectChoiceFourth,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`

**Row 5 — `cards.mmlu_pro.economics` (0-shot)**
- run_id `414a732835de74f3eedc99cce1f042ec7a8ad384cafa37e47c10da3fc2450410` · timestamp_utc 1,738,317,639,404 · runtime 4.243666 s
- accuracy 0.36 (CI 0.27–0.46)
- recipe: `card=cards.mmlu_pro.economics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesInstructionsWithoutTopicHarness.enumerator_lowercase_choicesSeparator_newline_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`
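Both timestamp columns are epoch milliseconds; `timestamp_day_hour_utc` (1,738,317,600,000 for every row here) sits exactly on an hour boundary, which is what groups these 21 runs into one batch. Converting to a datetime is just a division by 1000:

```python
from datetime import datetime, timezone

dt = datetime.fromtimestamp(row["timestamp_utc"] / 1000, tz=timezone.utc)
print(dt.isoformat())  # these rows fall on 2025-01-31 (UTC)
```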
**Row 6 — `cards.mmlu.business_ethics` (0-shot)**
- run_id `b7a4cf6091f3cdcf911b6c516faf77badd1b2930a32795e02507d7c703e20090` · timestamp_utc 1,738,317,643,494 · runtime 3.31157 s
- accuracy 0.59 (CI 0.49–0.68)
- recipe: `card=cards.mmlu.business_ethics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithoutTopic.enumerator_numbers_choicesSeparator_newline_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`

**Row 7 — `cards.mmlu.business_ethics` (0-shot)**
- run_id `5cfb32606115e7d01aab3b3686811f97ccd97fb6446cbbe3d97f2d097e56cae2` · timestamp_utc 1,738,317,705,829 · runtime 3.51728 s
- accuracy 0.63 (CI 0.52–0.71)
- recipe: `card=cards.mmlu.business_ethics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateHere.enumerator_greek_choicesSeparator_semicolon_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`

**Row 8 — `cards.mmlu_pro.philosophy` (5-shot)**
- run_id `e33a078f955878ed7df567d7acd2e61d943151ba847c5209892f029f278d1535` · timestamp_utc 1,738,317,653,340 · runtime 9.296923 s
- accuracy 0.25 (CI 0.18–0.34)
- recipe: `card=cards.mmlu_pro.philosophy,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesInstructionsStateHere.enumerator_keyboard_choicesSeparator_orLower_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`

**Row 9 — `cards.mmlu.high_school_computer_science` (0-shot)**
- run_id `7d7f8c24a66af9516fb0b54095179c84058d608b3f9e5bf18b3fc3d571b8c4d2` · timestamp_utc 1,738,317,658,174 · runtime 3.722113 s
- accuracy 0.61 (CI 0.51–0.7)
- recipe: `card=cards.mmlu.high_school_computer_science,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelowPlease.enumerator_roman_choicesSeparator_OrCapital_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`

**Row 10 — `cards.mmlu.miscellaneous` (0-shot)**
- run_id `555139271939a102987d411c16f6112dde2f3e75e8369d2f85f6c6ddace5dc87` · timestamp_utc 1,738,317,662,034 · runtime 3.2515 s
- accuracy 0.62 (CI 0.53–0.71)
- recipe: `card=cards.mmlu.miscellaneous,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithTopicHelm.enumerator_greek_choicesSeparator_semicolon_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`

**Row 11 — `cards.mmlu.us_foreign_policy` (5-shot)**
- run_id `a91478a1095acd37e64114ee884edeec8e629a0d4964ddcd21e7e40512f19631` · timestamp_utc 1,738,317,669,646 · runtime 7.090729 s
- accuracy 0.75 (CI 0.66–0.83)
- recipe: `card=cards.mmlu.us_foreign_policy,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSACould.enumerator_greek_choicesSeparator_comma_shuffleChoices_placeCorrectChoiceFourth,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`

**Row 12 — `cards.mmlu.professional_accounting` (0-shot)**
- run_id `6fe16c5c00c0ea42ca308fbcd64a89094971b471a76d92145aa3c58b80d1fae5` · timestamp_utc 1,738,317,674,974 · runtime 4.431322 s
- accuracy 0.31 (CI 0.22–0.41)
- recipe: `card=cards.mmlu.professional_accounting,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelowPlease.enumerator_greek_choicesSeparator_orLower_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`

**Row 13 — `cards.mmlu.college_medicine` (5-shot)**
- run_id `dd6cff04029348bcfb4e1a7a914a22428a945f3b1a8e1bfc57801e5a00a53001` · timestamp_utc 1,738,317,683,246 · runtime 7.590691 s
- accuracy 0.48 (CI 0.38–0.58)
- recipe: `card=cards.mmlu.college_medicine,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHarness.enumerator_lowercase_choicesSeparator_newline_shuffleChoices_placeCorrectChoiceFourth,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`

**Row 14 — `cards.ai2_arc.arc_challenge` (5-shot)**
- run_id `2eb4b3c57f9eab046639320ebf8a71d0092d6252a414896ddfbee8b651d4ea88` · timestamp_utc 1,738,317,691,151 · runtime 6.997725 s
- accuracy 0.78 (CI 0.69–0.85)
- recipe: `card=cards.ai2_arc.arc_challenge,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.AI2_ARC.MultipleChoiceTemplatesInstructionsWithoutTopicHarness.enumerator_keyboard_choicesSeparator_pipe_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`

**Row 15 — `cards.mmlu_pro.physics` (0-shot)**
- run_id `128fdd361ac01e3e3c8077aa5ecf3b2229f84c46e4da11c81fb3fd3e371d11e3` · timestamp_utc 1,738,317,696,720 · runtime 4.791999 s
- accuracy 0.19 (CI 0.13–0.29)
- recipe: `card=cards.mmlu_pro.physics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesStructuredWithoutTopic.enumerator_numbers_choicesSeparator_OrCapital_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`

**Row 16 — `cards.mmlu_pro.business` (0-shot)**
- run_id `e5c0afee46e8834ef4d9a55abc5dc65241a8d8d8452d90a2557ef63e05c86a9c` · timestamp_utc 1,738,317,701,610 · runtime 4.218084 s
- accuracy 0.21 (CI 0.13–0.3)
- recipe: `card=cards.mmlu_pro.business,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesInstructionsWithTopic.enumerator_keyboard_choicesSeparator_comma_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`

**Row 17 — `cards.mmlu.anatomy` (5-shot)**
- run_id `106bf6e748e50e7fadd9863f7bcd1f6263d26300653a3d040125ac00c2c787e4` · timestamp_utc 1,738,317,712,310 · runtime 5.91364 s
- accuracy 0.65 (CI 0.5519578323559557–0.73)
- recipe: `card=cards.mmlu.anatomy,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSAAddress.enumerator_greek_choicesSeparator_newline_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`

**Row 18 — `cards.mmlu.jurisprudence` (5-shot)**
- run_id `4829810efb716448bc7cae191ccdd472487deaef1bb7479ee01ef6a568e88c29` · timestamp_utc 1,738,317,719,552 · runtime 6.521319 s
- accuracy 0.71 (CI 0.61–0.79)
- recipe: `card=cards.mmlu.jurisprudence,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithoutTopic.enumerator_lowercase_choicesSeparator_orLower_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`

**Row 19 — `cards.mmlu.human_aging` (0-shot)**
- run_id `ae8dc7e61787634e2decbcc87c2dac6e28bc64e016e354fb8f6a1c4a792eae05` · timestamp_utc 1,738,317,723,655 · runtime 3.280874 s
- accuracy 0.55 (CI 0.45–0.64)
- recipe: `card=cards.mmlu.human_aging,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateHere.enumerator_numbers_choicesSeparator_orLower_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`

**Row 20 — `cards.mmlu.moral_scenarios` (0-shot)**
- run_id `c7a467dd699e90922046ab996f19d4bda4fa250090debd74e1a7fafd5e42c25c` · timestamp_utc 1,738,317,727,784 · runtime 3.59344 s
- accuracy 0.3 (CI 0.22–0.39)
- recipe: `card=cards.mmlu.moral_scenarios,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSAAddress.enumerator_capitals_choicesSeparator_semicolon_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`

**Row 21 — `cards.hellaswag` (5-shot)**
- run_id `9ca6eb2f39e8e75942afeda9dc0bbdddb0f6011f04f62a6216035509fa549cf4` · timestamp_utc 1,738,317,737,166 · runtime 8.816494 s
- accuracy 0.62 (CI 0.51–0.71)
- recipe: `card=cards.hellaswag,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.HellaSwag.MultipleChoiceTemplatesInstructionsStructured.enumerator_roman_choicesSeparator_newline_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`
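With the `scores` column parsed, accuracy can be compared across benchmark suites directly from the preview; for example, the MMLU-Pro subsets above land well below the MMLU ones. A quick aggregation sketch (pandas assumed available alongside `datasets`):

```python
import ast

df = ds.to_pandas()
df["accuracy"] = df["scores"].map(lambda s: ast.literal_eval(s)["accuracy"])
df["suite"] = df["unitxt_card"].str.split(".").str[1]  # "mmlu", "mmlu_pro", ...
print(df.groupby("suite")["accuracy"].agg(["mean", "count"]).sort_values("mean"))
```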