Column schema of the preview, with the dataset viewer's per-column summary (value ranges, string lengths, or distinct-value counts):

| Column | Type | Summary |
| --- | --- | --- |
| run_id | large_string | lengths 64 to 64 |
| timestamp_utc | int64 | 1,736B to 1,737B |
| timestamp_day_hour_utc | int64 | 1,736B to 1,737B |
| model_name_or_path | large_string | 5 distinct values |
| unitxt_card | large_string | 76 distinct values |
| unitxt_recipe | large_string | lengths 330 to 399 |
| quantization_type | large_string | 1 distinct value |
| quantization_bit_count | large_string | 1 distinct value |
| inference_runtime_s | float64 | 1.2 to 66.4 |
| generation_args | large_string | 1 distinct value |
| model_args | large_string | 5 distinct values |
| inference_engine | large_string | 1 distinct value |
| packages_versions | large_string | 1 distinct value |
| scores | large_string | lengths 174 to 242 |
| num_gpu | int64 | 1 to 1 |
| device | large_string | 1 distinct value |
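The four serialized columns need decoding before use: generation_args, model_args, and packages_versions hold valid JSON, while scores holds a Python-style dict repr with single quotes. A minimal loading sketch, assuming the rows have been exported to a local parquet file named runs.parquet (a placeholder path, not part of this dataset):

```python
import ast
import json

import pandas as pd

# Placeholder path: substitute the actual parquet export of this table.
df = pd.read_parquet("runs.parquet")

# These three columns are JSON strings with double quotes.
for col in ["generation_args", "model_args", "packages_versions"]:
    df[col] = df[col].map(json.loads)

# scores uses single quotes (a Python dict repr), so json.loads would fail;
# ast.literal_eval parses it safely.
df["scores"] = df["scores"].map(ast.literal_eval)

# Pull the headline metric out of each scores dict for quick inspection.
df["accuracy"] = df["scores"].map(lambda s: s["accuracy"])
print(df[["unitxt_card", "inference_runtime_s", "accuracy"]].head())
```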
The preview contains 49 rows. The following fields are identical in every row shown (the final record is truncated after its scores field, but all complete records agree):

- model_name_or_path: mistralai_Mistral-7B-Instruct-v0.3
- timestamp_day_hour_utc: 1,736,449,200,000
- quantization_type: None
- quantization_bit_count: half
- inference_engine: VLLM
- num_gpu: 1
- device: a100_80gb
- generation_args: `{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}`
- model_args: `{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}`
- packages_versions: `{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}`
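As a minimal sketch of what the constant model_args and generation_args fields describe (not the harness that produced these rows), the vLLM engine and sampling parameters could be rebuilt as follows; null-valued sampling knobs are dropped so vLLM's defaults apply, and a CUDA GPU with vllm installed is assumed:

```python
from vllm import LLM, SamplingParams

# The constant model_args field, decoded from JSON.
model_args = {
    "model": "mistralai/Mistral-7B-Instruct-v0.3",
    "seed": 0,
    "device": "auto",
    "max_num_batched_tokens": 4096,
    "gpu_memory_utilization": 0.7,
    "max_model_len": 4096,
    "tensor_parallel_size": 1,
}

# The constant generation_args field; top_p and temperature are null in the
# dataset, so they are filtered out below to keep vLLM's defaults.
generation_args = {
    "n": 1,
    "skip_special_tokens": False,
    "max_tokens": 64,
    "seed": 42,
    "top_p": None,
    "top_k": -1,
    "temperature": None,
    "logprobs": 5,
    "prompt_logprobs": 1,
}

llm = LLM(**model_args)
params = SamplingParams(**{k: v for k, v in generation_args.items() if v is not None})
outputs = llm.generate(["The capital of France is"], params)
print(outputs[0].outputs[0].text)
```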
Every unitxt_recipe in the preview follows a single pattern, varying only in its card, num_demos, and template: `card=<unitxt_card>,demos_pool_size=100,num_demos=<num_demos>,format=formats.chat_api,template=<template>,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100`. All templates live under the templates.huji_workshop namespace, which the table omits for brevity. Likewise, every scores value carries score_name 'accuracy', a score field equal to accuracy, matching score_ci_low/score_ci_high fields, and num_of_instances 100, so only accuracy and its confidence bounds are shown. The per-row fields:

| run_id | timestamp_utc | unitxt_card | num_demos | template (under templates.huji_workshop) | inference_runtime_s | accuracy [ci_low, ci_high] |
| --- | --- | --- | --- | --- | --- | --- |
| 218629bd11968e2db99f3f7f8ca0683d9964b3aefe8d993b2e37514288458080 | 1,736,452,744,688 | cards.mmlu.medical_genetics | 0 | MMLU.MultipleChoiceTemplatesInstructionsStateBelow.enumerator_numbers_choicesSeparator_semicolon_shuffleChoices_False | 3.514133 | 0.65 [0.56, 0.75] |
| fa43b443b6dd712339f70a065904fa02dda4a21470f3b06e996351bd6cafea68 | 1,736,452,748,464 | cards.mmlu.high_school_government_and_politics | 0 | MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHarness.enumerator_keyboard_choicesSeparator_OrCapital_shuffleChoices_placeCorrectChoiceFourth | 3.2296 | 0.68 [0.59, 0.76] |
| ed995bbf6e3da5e4883b77ea33f23ff55cf6f64ae762615485e8a0c0a359bf73 | 1,736,452,752,093 | cards.mmlu.human_aging | 0 | MMLU.MultipleChoiceTemplatesInstructionsProSAAddress.enumerator_keyboard_choicesSeparator_OrCapital_shuffleChoices_False | 3.044794 | 0.37 [0.28, 0.47] |
| 6c291a8ea4cd42413a0fe3d00ead3f38b1d2dc9f87da6bb9a744cd7294f9710b | 1,736,452,694,669 | cards.mmlu.moral_scenarios | 5 | MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicFixed.enumerator_roman_choicesSeparator_semicolon_shuffleChoices_alphabeticalSortReverse | 9.375571 | 0.37 [0.28, 0.46] |
| ebd69d5a4306072d83b58bf77fdee8c3a77e6083a85bcee9f0e16dc896f5fb96 | 1,736,452,699,851 | cards.ai2_arc.arc_easy | 0 | AI2_ARC.MultipleChoiceTemplatesInstructionsProSASimple.enumerator_capitals_choicesSeparator_OrCapital_shuffleChoices_lengthSort | 3.959495 | 0.77 [0.69, 0.85] |
| 47aa16868d00f3801d37d0a2672502f62f91ae95d662ba40e2b6eb1e30960688 | 1,736,452,706,458 | cards.mmlu.anatomy | 5 | MMLU.MultipleChoiceTemplatesInstructionsProSACould.enumerator_keyboard_choicesSeparator_orLower_shuffleChoices_placeCorrectChoiceFourth | 6.092915 | 0.76 [0.67, 0.83] |
| 01dbde73cbf052e215fe098042b8ee0797248a86f717979d59cfedaf30179fbd | 1,736,452,711,644 | cards.mmlu_pro.psychology | 0 | MMLU_PRO.MultipleChoiceTemplatesStructuredWithTopic.enumerator_roman_choicesSeparator_OrCapital_shuffleChoices_alphabeticalSortReverse | 4.474478 | 0.35 [0.26, 0.45] |
| 1b061962311c6c104363ad2cfc8bc419bd21026541934b6853f37e486c93cd4b | 1,736,452,721,299 | cards.mmlu.college_chemistry | 5 | MMLU.MultipleChoiceTemplatesStructuredWithTopic.enumerator_capitals_choicesSeparator_semicolon_shuffleChoices_placeCorrectChoiceFourth | 8.436061 | 0.25 [0.17, 0.34] |
| 1bed80bdb90aa1c4170373706b08b8f8b38b4cdf037a979ef3a569f0b5afd5c0 | 1,736,452,729,801 | cards.mmlu.high_school_geography | 5 | MMLU.MultipleChoiceTemplatesInstructionsStateHere.enumerator_numbers_choicesSeparator_comma_shuffleChoices_placeCorrectChoiceFirst | 7.109529 | 0.77 [0.68, 0.84] |
| 4bef225b3b66556135ab248e937066a9c843044e22ceb01451c9f9d66c60bf36 | 1,736,452,735,898 | cards.mmlu.logical_fallacies | 0 | MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHelmFixed.enumerator_lowercase_choicesSeparator_semicolon_shuffleChoices_lengthSort | 5.298533 | 0.61 [0.51, 0.7] |
| 3563fd726b947b8e9064aa59801ec0408515e12aaea99571d949ea1576b45cea | 1,736,452,739,807 | cards.mmlu.computer_security | 0 | MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHelmFixed.enumerator_numbers_choicesSeparator_space_shuffleChoices_alphabeticalSort | 3.31754 | 0.58 [0.48, 0.671314794849194] |
| c2f057ae3fd7f16ee33860477bee8377422aa0ec35d219f6b7e4bd4aca074fda | 1,736,452,747,135 | cards.openbook_qa | 5 | OpenBookQA.MultipleChoiceTemplatesStructuredWithoutTopic.enumerator_lowercase_choicesSeparator_OrCapital_shuffleChoices_lengthSort | 6.773003 | 0.76 [0.67, 0.83] |
| ee5f600b76d4af8daf7d2acc69a353fd580d3500e184045ce77d0044fd6e8929 | 1,736,452,752,408 | cards.mmlu.high_school_physics | 0 | MMLU.MultipleChoiceTemplatesInstructionsStateHere.enumerator_capitals_choicesSeparator_orLower_shuffleChoices_alphabeticalSortReverse | 4.607615 | 0.32 [0.23, 0.42] |
| 729f808b9ab957d39744afe9974dc3d1b7c0499ddb32d20faeecd2fb3b511cb0 | 1,736,452,683,993 | cards.mmlu.abstract_algebra | 0 | MMLU.MultipleChoiceTemplatesInstructionsProSACould.enumerator_numbers_choicesSeparator_space_shuffleChoices_lengthSortReverse | 4.125154 | 0.29 [0.21, 0.39] |
| 11a63a987c885fe8c5ac39eb9622abb2dc34c1e6f246eb7bba31f53740854cda | 1,736,452,727,461 | cards.mmlu.abstract_algebra | 5 | MMLU.MultipleChoiceTemplatesInstructionsStateBelowPlease.enumerator_lowercase_choicesSeparator_comma_shuffleChoices_alphabeticalSort | 6.96945 | 0.32 [0.23, 0.42] |
| 958df3fc75fda1e320b528e3b85baf26ba8adc7e7ebf87318bb14db03bab7af4 | 1,736,452,688,251 | cards.ai2_arc.arc_easy | 0 | AI2_ARC.MultipleChoiceTemplatesStructuredWithTopic.enumerator_capitals_choicesSeparator_pipe_shuffleChoices_alphabeticalSort | 3.699016 | 0.85 [0.76, 0.91] |
| 9b6248afd2e6ede1d9e9305d32b55cae0b6663d09f4e72c5da3673584224e64f | 1,736,452,692,642 | cards.mmlu.college_biology | 0 | MMLU.MultipleChoiceTemplatesInstructionsProSASimple.enumerator_roman_choicesSeparator_pipe_shuffleChoices_False | 3.9275 | 0.56 [0.46, 0.65] |
| e242a0dc4a323fd375bcd736218139965fbeed02ef031a95f3f6b213a8023c90 | 1,736,452,703,894 | cards.mmlu_pro.economics | 5 | MMLU_PRO.MultipleChoiceTemplatesInstructionsWithoutTopicFixed.enumerator_roman_choicesSeparator_comma_shuffleChoices_lengthSortReverse | 10.685167 | 0.42 [0.3244104443237977, 0.5146955405919335] |
| 1082a3077119aa08080fc4afc7f218bb93a1db84bfec09b855aaab945145a5ad | 1,736,452,709,171 | cards.mmlu.astronomy | 0 | MMLU.MultipleChoiceTemplatesInstructionsStateHere.enumerator_lowercase_choicesSeparator_comma_shuffleChoices_alphabeticalSort | 4.046937 | 0.57 [0.47, 0.66] |
| 79fa2a7acf53719097a42cfaddf7ca526bbe0c5bf0b75ec5c94f8a9b757725d5 | 1,736,452,713,003 | cards.mmlu.high_school_macroeconomics | 0 | MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicFixed.enumerator_keyboard_choicesSeparator_newline_shuffleChoices_False | 3.200396 | 0.47 [0.37, 0.57] |
| 8d249087908c559b10dd5807cf2638f958c23bce256f8bf6e738fb9ecb31d723 | 1,736,452,719,148 | cards.mmlu.virology | 5 | MMLU.MultipleChoiceTemplatesInstructionsProSAAddress.enumerator_capitals_choicesSeparator_OrCapital_shuffleChoices_placeCorrectChoiceFourth | 5.585423 | 0.45 [0.35, 0.55] |
| d0a8c0c14512cf08f1a3844121eca456d8d42d4c1a1339275fbfba992082b2aa | 1,736,452,738,004 | cards.mmlu.nutrition | 5 | MMLU.MultipleChoiceTemplatesInstructionsStateHere.enumerator_roman_choicesSeparator_pipe_shuffleChoices_alphabeticalSortReverse | 9.693968 | 0.69 [0.6, 0.78] |
| dfaad4f7efe597dae4d839668a8418e395210a295336315ec4dd5fb2b4509bae | 1,736,452,750,726 | cards.mmlu_pro.biology | 5 | MMLU_PRO.MultipleChoiceTemplatesInstructionsWithTopic.enumerator_lowercase_choicesSeparator_newline_shuffleChoices_lengthSort | 11.676182 | 0.53 [0.43, 0.63] |
| 7a35d30a7dddc72f081542ebfd461ffa6f7110beea73db280c5db7accbb0a0a2 | 1,736,452,696,287 | cards.mmlu_pro.other | 0 | MMLU_PRO.MultipleChoiceTemplatesInstructionsWithoutTopicFixed.enumerator_roman_choicesSeparator_orLower_shuffleChoices_lengthSortReverse | 4.63024 | 0.24 [0.16, 0.33] |
| 49ebe3c4475ac37e70e7a205551f8f5e93b877fdd439c38bf054a469449b0d22 | 1,736,452,702,228 | cards.mmlu_pro.biology | 0 | MMLU_PRO.MultipleChoiceTemplatesInstructionsStateBelow.enumerator_roman_choicesSeparator_space_shuffleChoices_alphabeticalSortReverse | 5.205208 | 0.46 [0.37, 0.56] |
| 806536ac52a4d61b3218ea78625595f18aeb9d554dcd724fa364972d7278cf12 | 1,736,452,711,688 | cards.mmlu.astronomy | 5 | MMLU.MultipleChoiceTemplatesInstructionsProSACould.enumerator_roman_choicesSeparator_orLower_shuffleChoices_False | 8.613231 | 0.63 [0.53, 0.72] |
| e5151b09849e42ddc817854481f0d8a33d7ee1ce6fceded87017a9d442a09a96 | 1,736,452,720,690 | cards.mmlu.public_relations | 5 | MMLU.MultipleChoiceTemplatesInstructionsStateHere.enumerator_numbers_choicesSeparator_semicolon_shuffleChoices_alphabeticalSortReverse | 7.555483 | 0.6 [0.5, 0.69] |
| 5843ecfda071db28a30493ecdda3476596ed0dc7a6f4d77bae25bdbf67ef83c5 | 1,736,452,724,989 | cards.mmlu.high_school_macroeconomics | 0 | MMLU.MultipleChoiceTemplatesInstructionsProSAAddress.enumerator_numbers_choicesSeparator_OrCapital_shuffleChoices_placeCorrectChoiceFourth | 3.468716 | 0.38 [0.29, 0.47] |
| e25d4bf4211a0edf0cde2a5c0dcc579069996d602d71f8bda0c8e7933e6a30d3 | 1,736,452,733,949 | cards.mmlu.college_chemistry | 5 | MMLU.MultipleChoiceTemplatesInstructionsProSASimple.enumerator_greek_choicesSeparator_newline_shuffleChoices_lengthSort | 8.371351 | 0.44 [0.35, 0.5375448194455783] |
| 41534b3b77ca53803f223aa996f9e09ef472cc863d91c740b15b8b2bcff7b592 | 1,736,452,738,474 | cards.mmlu.abstract_algebra | 0 | MMLU.MultipleChoiceTemplatesStructuredWithoutTopic.enumerator_greek_choicesSeparator_space_shuffleChoices_lengthSort | 3.635241 | 0.19 [0.12, 0.27] |
| c2b73871a0ff634dc2ba9ac478eda6129a6d1af83e2d1f49285f87c11f9e722e | 1,736,452,746,968 | cards.mmlu.abstract_algebra | 0 | MMLU.MultipleChoiceTemplatesInstructionsStateBelowPlease.enumerator_lowercase_choicesSeparator_orLower_shuffleChoices_placeCorrectChoiceFourth | 4.270524 | 0.21 [0.14, 0.31] |
| 7314bad16ec793903873a2e6697c57984e8e49bc55a1899c5f4d5666b7ee77ad | 1,736,452,742,184 | cards.mmlu.marketing | 0 | MMLU.MultipleChoiceTemplatesInstructionsProSASimple.enumerator_roman_choicesSeparator_pipe_shuffleChoices_False | 3.172511 | 0.63 [0.54, 0.73] |
| d663e25b6614df97eb69fcba819a0558eb2fd1bd3a83b7724b4fecfc0e0657b7 | 1,736,452,750,870 | cards.mmlu.anatomy | 0 | MMLU.MultipleChoiceTemplatesStructuredWithoutTopic.enumerator_greek_choicesSeparator_semicolon_shuffleChoices_lengthSortReverse | 3.375818 | 0.47 [0.38, 0.57] |
| 2146b09c34252bc90d533cdf373c2208db39ae4bf18078fcbc4c756820b39c2a | 1,736,452,686,642 | cards.mmlu.clinical_knowledge | 0 | MMLU.MultipleChoiceTemplatesInstructionsStateBelow.enumerator_lowercase_choicesSeparator_newline_shuffleChoices_lengthSort | 4.519318 | 0.61 [0.51, 0.7] |
| 262acb09ee5fcfffb35dd4cfaae4b91d89c22430df2bb9a3c95a2dbd9ca1c184 | 1,736,452,691,198 | cards.mmlu.abstract_algebra | 0 | MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicFixed.enumerator_greek_choicesSeparator_semicolon_shuffleChoices_alphabeticalSort | 3.861472 | 0.26 [0.18, 0.35] |
| cd531f664aa45ae83eff643cdd79a78d6acd875ce868298162299d1e21c87a8e | 1,736,452,727,574 | cards.mmlu.abstract_algebra | 0 | MMLU.MultipleChoiceTemplatesInstructionsStateHere.enumerator_numbers_choicesSeparator_OrCapital_shuffleChoices_placeCorrectChoiceFourth | 3.447599 | 0.45 [0.35, 0.5584732912372409] |
| 7509c9a9aab0abf740b65282f3679ceda37d610f37d4c9c9cf49e4ef133830f3 | 1,736,452,747,856 | cards.mmlu.abstract_algebra | 5 | MMLU.MultipleChoiceTemplatesInstructionsWithTopic.enumerator_lowercase_choicesSeparator_comma_shuffleChoices_lengthSort | 7.034135 | 0.29 [0.22, 0.38] |
| bb578d9b270f980547b900a2f7efb100b669c6cfb86e2c93cd4c89420e1b51f0 | 1,736,452,695,300 | cards.mmlu.machine_learning | 0 | MMLU.MultipleChoiceTemplatesInstructionsProSASimple.enumerator_keyboard_choicesSeparator_space_shuffleChoices_placeCorrectChoiceFirst | 3.502488 | 0.25 [0.18, 0.35] |
| bc7f1cd93b57522b8c27d569f693475ff56d24141d34e17fd2eebd97bfc2482a | 1,736,452,704,272 | cards.ai2_arc.arc_easy | 5 | AI2_ARC.MultipleChoiceTemplatesInstructionsStateBelow.enumerator_greek_choicesSeparator_pipe_shuffleChoices_placeCorrectChoiceFirst | 8.35314 | 0.87 [0.79, 0.93] |
| f9126b72f63de7b55a98b9d21b9fc8962d54ad5d25efc2857d8ec672f1244dd5 | 1,736,452,714,092 | cards.mmlu.professional_psychology | 5 | MMLU.MultipleChoiceTemplatesInstructionsProSACould.enumerator_roman_choicesSeparator_comma_shuffleChoices_lengthSort | 8.946639 | 0.58 [0.47, 0.67] |
| a2bf02a8bf348108b3e20157699e56b11633a3b2af6e7ff44c7a74a9b8a95448 | 1,736,452,718,786 | cards.mmlu.high_school_government_and_politics | 0 | MMLU.MultipleChoiceTemplatesInstructionsStateBelowPlease.enumerator_numbers_choicesSeparator_newline_shuffleChoices_alphabeticalSortReverse | 3.567575 | 0.82 [0.74, 0.89] |
| cf64e026f416d6f951f081db8b63f0ff690ad2714e991412675b314a07983bdb | 1,736,452,723,506 | cards.mmlu.college_mathematics | 0 | MMLU.MultipleChoiceTemplatesInstructionsStateHere.enumerator_lowercase_choicesSeparator_space_shuffleChoices_False | 4.056798 | 0.28 [0.2, 0.37] |
| 316f29b0d06bc140e5b1a5e824f8ead898a8875410294a9d188b8fbbc638706c | 1,736,452,739,522 | cards.mmlu.high_school_statistics | 5 | MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicFixed.enumerator_roman_choicesSeparator_pipe_shuffleChoices_lengthSortReverse | 11.330147 | 0.4 [0.3, 0.49] |
| a6265ec405fba58907791b5ea40630041436775ff0be8f47fb0908c2563f4cc1 | 1,736,452,691,172 | cards.mmlu.moral_scenarios | 0 | MMLU.MultipleChoiceTemplatesInstructionsProSACould.enumerator_lowercase_choicesSeparator_OrCapital_shuffleChoices_placeCorrectChoiceFirst | 4.486525 | 0.17 [0.1, 0.25] |
| b76a2d81c5d7ba09f96aeffcdf950afd466e9e145b6d9f0d7265342367ff6590 | 1,736,452,707,878 | cards.mmlu.moral_scenarios | 0 | MMLU.MultipleChoiceTemplatesStructuredWithTopic.enumerator_roman_choicesSeparator_pipe_shuffleChoices_alphabeticalSortReverse | 3.27982 | 0.27 [0.19, 0.35] |
| 51595a482d3641cec52080f3003c6bf572bd8a242aff53f2857916685b1eacac | 1,736,452,695,692 | cards.mmlu.college_physics | 0 | MMLU.MultipleChoiceTemplatesInstructionsProSASimple.enumerator_roman_choicesSeparator_semicolon_shuffleChoices_alphabeticalSortReverse | 3.917148 | 0.28 [0.19, 0.36] |
| cc7c449d48ea63748ef9cca844d5eed4b723cdf5bc89255a41f7662e0d20cd11 | 1,736,452,728,797 | cards.mmlu.college_physics | 0 | MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHelmFixed.enumerator_roman_choicesSeparator_OrCapital_shuffleChoices_lengthSortReverse | 3.403411 | 0.24 [0.16665716236571526, 0.33] |
| 5d4b18d70cf999b9938d6e96ecf9123c314b918b5f63b635655dfe8cd0238219 | 1,736,452,703,321 | cards.mmlu.high_school_microeconomics | 5 | MMLU.MultipleChoiceTemplatesInstructionsStateBelow.enumerator_roman_choicesSeparator_semicolon_shuffleChoices_placeCorrectChoiceFourth | 7.092148 | 0.44 [0.34, 0.54] |
| 050d8696ee2683ef4efde4d510c77e3a5c7675e96f422e4432ef6685d79c68f9 | 1,736,452,719,474 | cards.mmlu_pro.business | 5 | MMLU_PRO.MultipleChoiceTemplatesInstructionsProSAAddress.enumerator_numbers_choicesSeparator_pipe_shuffleChoices_alphabeticalSort | 11.100986 | 0.21 [0.13, 0.3] |
1
a100_80gb
55efde21315434f1e4b24bdf061ff2bdd43e9e8cabf3267e7cc73d0935f7a466
1,736,452,724,740
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.international_law
card=cards.mmlu.international_law,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithTopic.enumerator_lowercase_choicesSeparator_semicolon_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
4.119699
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.7, 'accuracy_ci_low': 0.6, 'accuracy_ci_high': 0.78, 'score_name': 'accuracy', 'score': 0.7, 'score_ci_high': 0.78, 'score_ci_low': 0.6, 'num_of_instances': 100}
1
a100_80gb
e6d44852ee960b16fe7f6739c5c93616e8ae3cb5e6ac5c2b8085401110cf3f67
1,736,452,732,598
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.medical_genetics
card=cards.mmlu.medical_genetics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelow.enumerator_capitals_choicesSeparator_space_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.255639
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.6, 'accuracy_ci_low': 0.51, 'accuracy_ci_high': 0.69, 'score_name': 'accuracy', 'score': 0.6, 'score_ci_high': 0.69, 'score_ci_low': 0.51, 'num_of_instances': 100}
1
a100_80gb
2515402635857a9f89f26941774907a0d5988185e7bfd3daee8b6a7d36f76872
1,736,452,736,708
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.college_chemistry
card=cards.mmlu.college_chemistry,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithTopic.enumerator_keyboard_choicesSeparator_newline_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.57921
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.28, 'accuracy_ci_low': 0.2, 'accuracy_ci_high': 0.38, 'score_name': 'accuracy', 'score': 0.28, 'score_ci_high': 0.38, 'score_ci_low': 0.2, 'num_of_instances': 100}
1
a100_80gb
74325401af8f5fdc31acdceae856c96658da58817c11ca0f7cbf72054c5fde2d
1,736,452,741,213
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.nutrition
card=cards.mmlu.nutrition,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithTopic.enumerator_capitals_choicesSeparator_semicolon_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.949434
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.63, 'accuracy_ci_low': 0.53, 'accuracy_ci_high': 0.73, 'score_name': 'accuracy', 'score': 0.63, 'score_ci_high': 0.73, 'score_ci_low': 0.53, 'num_of_instances': 100}
1
a100_80gb
114acd59a6d0a7663b44b27c3476d1ed163fe05c850bd951bd4212a2de268833
1,736,452,675,714
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_macroeconomics
card=cards.mmlu.high_school_macroeconomics,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSASimple.enumerator_keyboard_choicesSeparator_pipe_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
6.270087
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.53, 'accuracy_ci_low': 0.43, 'accuracy_ci_high': 0.62, 'score_name': 'accuracy', 'score': 0.53, 'score_ci_high': 0.62, 'score_ci_low': 0.43, 'num_of_instances': 100}
1
a100_80gb
a965027668c16ea33a15967d6532e2cf2f42a6d2dce312872029c5392c0b3a6d
1,736,452,680,151
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.abstract_algebra
card=cards.mmlu.abstract_algebra,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSAAddress.enumerator_lowercase_choicesSeparator_newline_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.299692
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.24, 'accuracy_ci_low': 0.17, 'accuracy_ci_high': 0.33, 'score_name': 'accuracy', 'score': 0.24, 'score_ci_high': 0.33, 'score_ci_low': 0.17, 'num_of_instances': 100}
1
a100_80gb
2b6685e483559573c4716bb726c57387bb1e070c2830c39da9337d45cb29b12e
1,736,452,687,242
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.global_facts
card=cards.mmlu.global_facts,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHarness.enumerator_roman_choicesSeparator_semicolon_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
6.56727
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.31, 'accuracy_ci_low': 0.22, 'accuracy_ci_high': 0.4, 'score_name': 'accuracy', 'score': 0.31, 'score_ci_high': 0.4, 'score_ci_low': 0.22, 'num_of_instances': 100}
1
a100_80gb
1c44e03bbcc6b8aba249f415013e1532927579b0231883fa24f531f790d0ccde
1,736,452,691,634
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.international_law
card=cards.mmlu.international_law,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHelmFixed.enumerator_greek_choicesSeparator_semicolon_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.680202
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.68, 'accuracy_ci_low': 0.58, 'accuracy_ci_high': 0.76, 'score_name': 'accuracy', 'score': 0.68, 'score_ci_high': 0.76, 'score_ci_low': 0.58, 'num_of_instances': 100}
1
a100_80gb
a2fd7dfa9f5128e79773ef90f7c762f9d3c550cccd5e462c2f62cbcb3dbbcecb
1,736,452,697,970
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu_pro.philosophy
card=cards.mmlu_pro.philosophy,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesInstructionsStateBelow.enumerator_numbers_choicesSeparator_OrCapital_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
5.665897
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.29, 'accuracy_ci_low': 0.2, 'accuracy_ci_high': 0.38, 'score_name': 'accuracy', 'score': 0.29, 'score_ci_high': 0.38, 'score_ci_low': 0.2, 'num_of_instances': 100}
1
a100_80gb
55a4206ad91630c48287554919874aa5fbf6ce9b50eef37636470f7353103be7
1,736,452,702,320
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.college_chemistry
card=cards.mmlu.college_chemistry,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithTopicHelm.enumerator_greek_choicesSeparator_orLower_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.599807
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.41, 'accuracy_ci_low': 0.31, 'accuracy_ci_high': 0.51, 'score_name': 'accuracy', 'score': 0.41, 'score_ci_high': 0.51, 'score_ci_low': 0.31, 'num_of_instances': 100}
1
a100_80gb
b97b6bb278f860866e6d894e99fe848cf95d4eda2ab3ce329951d2933b50cfc6
1,736,452,711,915
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.ai2_arc.arc_challenge
card=cards.ai2_arc.arc_challenge,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.AI2_ARC.MultipleChoiceTemplatesInstructionsStateBelow.enumerator_numbers_choicesSeparator_OrCapital_shuffleChoices_placeCorrectChoiceFourth,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
9.023024
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.72, 'accuracy_ci_low': 0.62, 'accuracy_ci_high': 0.8, 'score_name': 'accuracy', 'score': 0.72, 'score_ci_high': 0.8, 'score_ci_low': 0.62, 'num_of_instances': 100}
1
a100_80gb
cfaf6cdd783224a889b521717cc2d85ad7ea08698364e1542cbd8eef5f1ead28
1,736,452,720,557
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.elementary_mathematics
card=cards.mmlu.elementary_mathematics,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithTopicHelm.enumerator_capitals_choicesSeparator_space_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
7.798741
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.41, 'accuracy_ci_low': 0.312789879850247, 'accuracy_ci_high': 0.51, 'score_name': 'accuracy', 'score': 0.41, 'score_ci_high': 0.51, 'score_ci_low': 0.312789879850247, 'num_of_instances': 100}
1
a100_80gb
cb87046630b96cc7a4d7b9be9b27e60d09fab632ded06521e87eb9d7b88875c5
1,736,452,728,564
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.openbook_qa
card=cards.openbook_qa,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.OpenBookQA.MultipleChoiceTemplatesStructuredWithTopic.enumerator_keyboard_choicesSeparator_comma_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
6.515749
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.85, 'accuracy_ci_low': 0.77, 'accuracy_ci_high': 0.91, 'score_name': 'accuracy', 'score': 0.85, 'score_ci_high': 0.91, 'score_ci_low': 0.77, 'num_of_instances': 100}
1
a100_80gb
cb8372b82bd06f470995a1d3f0cc5261222a0ada2c76a76387f5b77972c38133
1,736,452,740,329
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.college_computer_science
card=cards.mmlu.college_computer_science,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelowPlease.enumerator_greek_choicesSeparator_orLower_shuffleChoices_lengthSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
11.081969
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.5, 'accuracy_ci_low': 0.4, 'accuracy_ci_high': 0.6, 'score_name': 'accuracy', 'score': 0.5, 'score_ci_high': 0.6, 'score_ci_low': 0.4, 'num_of_instances': 100}
1
a100_80gb
2513898c59b651fa036ed9d21e490a61a1f5508ee23a2ad6c85fc0935bab1428
1,736,452,667,277
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu_pro.engineering
card=cards.mmlu_pro.engineering,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesInstructionsProSASimple.enumerator_keyboard_choicesSeparator_OrCapital_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
6.186918
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.11, 'accuracy_ci_low': 0.06, 'accuracy_ci_high': 0.18, 'score_name': 'accuracy', 'score': 0.11, 'score_ci_high': 0.18, 'score_ci_low': 0.06, 'num_of_instances': 100}
1
a100_80gb
bb2baff4b7f343996fc866c4a124afb7a34ce30bda6d475b25b2e61c892fb5ca
1,736,452,672,289
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.philosophy
card=cards.mmlu.philosophy,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSACould.enumerator_lowercase_choicesSeparator_space_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
4.289116
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.41, 'accuracy_ci_low': 0.32, 'accuracy_ci_high': 0.5, 'score_name': 'accuracy', 'score': 0.41, 'score_ci_high': 0.5, 'score_ci_low': 0.32, 'num_of_instances': 100}
1
a100_80gb
29a201d57f72b4f1d48fdf6f9a4a922b1498a0040efd7fd1092c14aad03baa87
1,736,452,676,057
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.medical_genetics
card=cards.mmlu.medical_genetics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSASimple.enumerator_capitals_choicesSeparator_orLower_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.207794
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.56, 'accuracy_ci_low': 0.4609702326562014, 'accuracy_ci_high': 0.66, 'score_name': 'accuracy', 'score': 0.56, 'score_ci_high': 0.66, 'score_ci_low': 0.4609702326562014, 'num_of_instances': 100}
1
a100_80gb
6484ec0f99d049994e6bbfb1f192893990e37094796d26b6c6d99d3166de8f5e
1,736,452,680,024
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.human_sexuality
card=cards.mmlu.human_sexuality,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelow.enumerator_greek_choicesSeparator_newline_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.436759
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.51, 'accuracy_ci_low': 0.42, 'accuracy_ci_high': 0.61, 'score_name': 'accuracy', 'score': 0.51, 'score_ci_high': 0.61, 'score_ci_low': 0.42, 'num_of_instances': 100}
1
a100_80gb
760bbc7c5a75aeb050fcca40bf54a89a73bde57a25345c4b3f4d6c5ba676d42e
1,736,452,709,998
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_european_history
card=cards.mmlu.high_school_european_history,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithTopic.enumerator_greek_choicesSeparator_space_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
29.05975
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.68, 'accuracy_ci_low': 0.58, 'accuracy_ci_high': 0.7656509685387971, 'score_name': 'accuracy', 'score': 0.68, 'score_ci_high': 0.7656509685387971, 'score_ci_low': 0.58, 'num_of_instances': 100}
1
a100_80gb
6954b70b66612876f3d7a6a895cfe585092a74b47e1801009cb60c814a6564e3
1,736,452,720,455
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.clinical_knowledge
card=cards.mmlu.clinical_knowledge,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSASimple.enumerator_lowercase_choicesSeparator_OrCapital_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
7.191347
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.67, 'accuracy_ci_low': 0.58, 'accuracy_ci_high': 0.77, 'score_name': 'accuracy', 'score': 0.67, 'score_ci_high': 0.77, 'score_ci_low': 0.58, 'num_of_instances': 100}
1
a100_80gb
487a4355cad3ccceb8c2a566f6296a72aedf8d970dc33c8f1fc0fbfb30a1a7a9
1,736,452,724,996
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_statistics
card=cards.mmlu.high_school_statistics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSACould.enumerator_keyboard_choicesSeparator_OrCapital_shuffleChoices_lengthSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.761078
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.33, 'accuracy_ci_low': 0.24, 'accuracy_ci_high': 0.42243784267928164, 'score_name': 'accuracy', 'score': 0.33, 'score_ci_high': 0.42243784267928164, 'score_ci_low': 0.24, 'num_of_instances': 100}
1
a100_80gb
3ac61463dcbbdac3d1c1d9479c5392d8157a14134370b9f8f7ff1197acea5e81
1,736,452,730,523
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu_pro.biology
card=cards.mmlu_pro.biology,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesInstructionsStateBelowPlease.enumerator_keyboard_choicesSeparator_space_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
4.885981
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.42, 'accuracy_ci_low': 0.33, 'accuracy_ci_high': 0.53, 'score_name': 'accuracy', 'score': 0.42, 'score_ci_high': 0.53, 'score_ci_low': 0.33, 'num_of_instances': 100}
1
a100_80gb
b4ddad5c972b03a4b6af647f3c9c19a6e8eb6f0c8348aea679fc7231a1553787
1,736,452,734,643
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.virology
card=cards.mmlu.virology,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelowPlease.enumerator_keyboard_choicesSeparator_OrCapital_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.289147
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.43, 'accuracy_ci_low': 0.34, 'accuracy_ci_high': 0.55, 'score_name': 'accuracy', 'score': 0.43, 'score_ci_high': 0.55, 'score_ci_low': 0.34, 'num_of_instances': 100}
1
a100_80gb
87191a98ae017d2d042b1dd005998153f3170db60d22c66066dd4abe988b3c18
1,736,452,740,004
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.ai2_arc.arc_challenge
card=cards.ai2_arc.arc_challenge,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.AI2_ARC.MultipleChoiceTemplatesInstructionsProSACould.enumerator_greek_choicesSeparator_pipe_shuffleChoices_lengthSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
4.831044
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.52, 'accuracy_ci_low': 0.42, 'accuracy_ci_high': 0.62, 'score_name': 'accuracy', 'score': 0.52, 'score_ci_high': 0.62, 'score_ci_low': 0.42, 'num_of_instances': 100}
1
a100_80gb
c1a50b0d94dd1d33ba49ba8cf9376e8a4c6ea3c71c9c221667b61aa9fde2897b
1,736,452,668,551
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_mathematics
card=cards.mmlu.high_school_mathematics,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSACould.enumerator_keyboard_choicesSeparator_newline_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
7.852978
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.26, 'accuracy_ci_low': 0.18, 'accuracy_ci_high': 0.35, 'score_name': 'accuracy', 'score': 0.26, 'score_ci_high': 0.35, 'score_ci_low': 0.18, 'num_of_instances': 100}
1
a100_80gb
212b9a381d96a3154043df58fb75e6fb6afe21a32f30e6dc9ec53aa6e3f8ca98
1,736,452,673,147
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.human_aging
card=cards.mmlu.human_aging,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateHere.enumerator_roman_choicesSeparator_OrCapital_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.410775
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.61, 'accuracy_ci_low': 0.513121452019117, 'accuracy_ci_high': 0.7, 'score_name': 'accuracy', 'score': 0.61, 'score_ci_high': 0.7, 'score_ci_low': 0.513121452019117, 'num_of_instances': 100}
1
a100_80gb
434e175aaff8f4c9ee2290918f7d032a1749085ffaf0037e247575d8c08e7de2
1,736,452,680,744
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.marketing
card=cards.mmlu.marketing,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateHere.enumerator_numbers_choicesSeparator_semicolon_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
7.091611
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.88, 'accuracy_ci_low': 0.81, 'accuracy_ci_high': 0.93, 'score_name': 'accuracy', 'score': 0.88, 'score_ci_high': 0.93, 'score_ci_low': 0.81, 'num_of_instances': 100}
1
a100_80gb
fca8010f179ff8775bc4071a1428208907eb7a802be55d021b6926805f893d05
1,736,452,693,732
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu_pro.biology
card=cards.mmlu_pro.biology,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesStructuredWithTopic.enumerator_roman_choicesSeparator_newline_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
12.185307
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.48, 'accuracy_ci_low': 0.39, 'accuracy_ci_high': 0.59, 'score_name': 'accuracy', 'score': 0.48, 'score_ci_high': 0.59, 'score_ci_low': 0.39, 'num_of_instances': 100}
1
a100_80gb
4646fa29bd9f44be340e4a89caf1f7389e75b2e428ed641533c46f9bc2c4a1a7
1,736,452,699,489
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.econometrics
card=cards.mmlu.econometrics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelowPlease.enumerator_capitals_choicesSeparator_semicolon_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.895856
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.41, 'accuracy_ci_low': 0.32, 'accuracy_ci_high': 0.52, 'score_name': 'accuracy', 'score': 0.41, 'score_ci_high': 0.52, 'score_ci_low': 0.32, 'num_of_instances': 100}
1
a100_80gb
335e395c9d059dcd549697e9c18d9955d0ce61750d4e91cf6be9932048b8d02a
1,736,452,703,685
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.college_chemistry
card=cards.mmlu.college_chemistry,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelowPlease.enumerator_greek_choicesSeparator_OrCapital_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.613434
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.44, 'accuracy_ci_low': 0.34, 'accuracy_ci_high': 0.54, 'score_name': 'accuracy', 'score': 0.44, 'score_ci_high': 0.54, 'score_ci_low': 0.34, 'num_of_instances': 100}
1
a100_80gb
2c14b0f2cbf2450fe5fe4c21289c0f6dc4f63089b7906c886583ed01b77746d5
1,736,452,708,326
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_chemistry
card=cards.mmlu.high_school_chemistry,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithTopic.enumerator_lowercase_choicesSeparator_newline_shuffleChoices_placeCorrectChoiceFourth,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
4.084024
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.32, 'accuracy_ci_low': 0.23, 'accuracy_ci_high': 0.41, 'score_name': 'accuracy', 'score': 0.32, 'score_ci_high': 0.41, 'score_ci_low': 0.23, 'num_of_instances': 100}
1
a100_80gb
584198461dc1528643a41d90e95cd6f7b5f9a8b7209b49ec4a79f516a0b8e2c2
1,736,452,724,973
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_chemistry
card=cards.mmlu.high_school_chemistry,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithoutTopic.enumerator_greek_choicesSeparator_space_shuffleChoices_lengthSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
8.198641
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.35, 'accuracy_ci_low': 0.27, 'accuracy_ci_high': 0.44, 'score_name': 'accuracy', 'score': 0.35, 'score_ci_high': 0.44, 'score_ci_low': 0.27, 'num_of_instances': 100}
1
a100_80gb
f74dc1ea8cb9b82db2b6837c6b3ede3cdb20573a315b46de377450bd6c0f0f10
1,736,452,715,966
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.electrical_engineering
card=cards.mmlu.electrical_engineering,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithTopic.enumerator_roman_choicesSeparator_pipe_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
7.072814
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.52, 'accuracy_ci_low': 0.43, 'accuracy_ci_high': 0.62, 'score_name': 'accuracy', 'score': 0.52, 'score_ci_high': 0.62, 'score_ci_low': 0.43, 'num_of_instances': 100}
1
a100_80gb
0f92aa530e572d08779a27acc25d985c372ea4779622e67bca5138c7f345316d
1,736,452,738,326
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu_pro.engineering
card=cards.mmlu_pro.engineering,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesStructuredWithTopic.enumerator_keyboard_choicesSeparator_comma_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
12.462137
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.23, 'accuracy_ci_low': 0.15, 'accuracy_ci_high': 0.31, 'score_name': 'accuracy', 'score': 0.23, 'score_ci_high': 0.31, 'score_ci_low': 0.15, 'num_of_instances': 100}
1
a100_80gb
db27ed06f573b1b6550cff26473f6c592d0c51425ca86ada178479e66b46b04a
1,736,452,671,382
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.college_chemistry
card=cards.mmlu.college_chemistry,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithTopic.enumerator_keyboard_choicesSeparator_newline_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
9.419244
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.48, 'accuracy_ci_low': 0.38, 'accuracy_ci_high': 0.57, 'score_name': 'accuracy', 'score': 0.48, 'score_ci_high': 0.57, 'score_ci_low': 0.38, 'num_of_instances': 100}
1
a100_80gb
5b68c8c9fb1564cdb60ab465a3a04e02c09ce751da0aeb072cd0dc7188482323
1,736,452,678,027
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.management
card=cards.mmlu.management,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicFixed.enumerator_roman_choicesSeparator_newline_shuffleChoices_placeCorrectChoiceFourth,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
5.707621
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.69, 'accuracy_ci_low': 0.6, 'accuracy_ci_high': 0.78, 'score_name': 'accuracy', 'score': 0.69, 'score_ci_high': 0.78, 'score_ci_low': 0.6, 'num_of_instances': 100}
1
a100_80gb
6338002011fc73f52f9053a3e5dfb108a56054afe23fa3e4c92c0e2869c27b15
1,736,452,682,170
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.nutrition
card=cards.mmlu.nutrition,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSACould.enumerator_capitals_choicesSeparator_orLower_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.504966
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.56, 'accuracy_ci_low': 0.45, 'accuracy_ci_high': 0.65, 'score_name': 'accuracy', 'score': 0.56, 'score_ci_high': 0.65, 'score_ci_low': 0.45, 'num_of_instances': 100}
1
a100_80gb
2b7ba1c9f786387a933bd61b50e4ed6cf0f5b86ea20b2172b2bb4f17c786792c
1,736,452,686,310
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_psychology
card=cards.mmlu.high_school_psychology,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithTopic.enumerator_lowercase_choicesSeparator_comma_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.558302
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.68, 'accuracy_ci_low': 0.58, 'accuracy_ci_high': 0.76, 'score_name': 'accuracy', 'score': 0.68, 'score_ci_high': 0.76, 'score_ci_low': 0.58, 'num_of_instances': 100}
1
a100_80gb
30dba1c44f171a25aee6587480a57a332b3356eabfd384aa6dc97dec93c72c93
1,736,452,693,226
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.human_aging
card=cards.mmlu.human_aging,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithTopic.enumerator_capitals_choicesSeparator_pipe_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
6.349718
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.65, 'accuracy_ci_low': 0.55, 'accuracy_ci_high': 0.73, 'score_name': 'accuracy', 'score': 0.65, 'score_ci_high': 0.73, 'score_ci_low': 0.55, 'num_of_instances': 100}
1
a100_80gb
68c7360eb1f349c6100afa86b8605ba1803a89295aac92fb05e99f5376ee029f
1,736,452,702,861
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu_pro.other
card=cards.mmlu_pro.other,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesInstructionsProSASimple.enumerator_roman_choicesSeparator_space_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
8.909994
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.42, 'accuracy_ci_low': 0.33, 'accuracy_ci_high': 0.51, 'score_name': 'accuracy', 'score': 0.42, 'score_ci_high': 0.51, 'score_ci_low': 0.33, 'num_of_instances': 100}
1
a100_80gb
5c10d006d38c16d4c430277eef518be3b95b82fcc664f02cb4e04de17ad8d2ec
1,736,452,719,907
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.security_studies
card=cards.mmlu.security_studies,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithTopic.enumerator_greek_choicesSeparator_semicolon_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
16.03434
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.62, 'accuracy_ci_low': 0.53, 'accuracy_ci_high': 0.71, 'score_name': 'accuracy', 'score': 0.62, 'score_ci_high': 0.71, 'score_ci_low': 0.53, 'num_of_instances': 100}
1
a100_80gb
474b5a8d66994bc8bddc6a7c385b51288a410a785d13849bd221dc63c2c3b0d0
1,736,452,729,278
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.clinical_knowledge
card=cards.mmlu.clinical_knowledge,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithTopicHelm.enumerator_roman_choicesSeparator_newline_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
7.007459
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.66, 'accuracy_ci_low': 0.56, 'accuracy_ci_high': 0.75, 'score_name': 'accuracy', 'score': 0.66, 'score_ci_high': 0.75, 'score_ci_low': 0.56, 'num_of_instances': 100}
1
a100_80gb
521fc4e01a3f71e31e53abc0dbb97536c3e4e3a128959ec4f624c915251e6e9d
1,736,452,733,909
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.computer_security
card=cards.mmlu.computer_security,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithTopic.enumerator_keyboard_choicesSeparator_semicolon_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.760603
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.53, 'accuracy_ci_low': 0.43, 'accuracy_ci_high': 0.63, 'score_name': 'accuracy', 'score': 0.53, 'score_ci_high': 0.63, 'score_ci_low': 0.43, 'num_of_instances': 100}
1
a100_80gb
run_id: 90b17f6269c2ea1a9667fd157c2d1355e9d6d275f08a1ee451ca0c40a3ec6446
timestamp_utc: 1736452738293
timestamp_day_hour_utc: 1736449200000
model_name_or_path: mistralai_Mistral-7B-Instruct-v0.3
unitxt_card: cards.mmlu.marketing
unitxt_recipe: card=cards.mmlu.marketing,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicFixed.enumerator_roman_choicesSeparator_semicolon_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
quantization_type: None
quantization_bit_count: half
inference_runtime_s: 3.125206
generation_args: {"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
model_args: {"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
inference_engine: VLLM
packages_versions: {"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
scores: {'accuracy': 0.71, 'accuracy_ci_low': 0.62, 'accuracy_ci_high': 0.79, 'score_name': 'accuracy', 'score': 0.71, 'score_ci_high': 0.79, 'score_ci_low': 0.62, 'num_of_instances': 100}
num_gpu: 1
device: a100_80gb

run_id: 615a32abbbbe3ead9d660adfd646952f7640b1bd045c63f42200738ec2f96573
timestamp_utc: 1736452664307
timestamp_day_hour_utc: 1736449200000
model_name_or_path: mistralai_Mistral-7B-Instruct-v0.3
unitxt_card: cards.mmlu.high_school_biology
unitxt_recipe: card=cards.mmlu.high_school_biology,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicFixed.enumerator_roman_choicesSeparator_OrCapital_shuffleChoices_lengthSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
quantization_type: None
quantization_bit_count: half
inference_runtime_s: 4.308711
generation_args: {"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
model_args: {"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
inference_engine: VLLM
packages_versions: {"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
scores: {'accuracy': 0.6, 'accuracy_ci_low': 0.49, 'accuracy_ci_high': 0.69, 'score_name': 'accuracy', 'score': 0.6, 'score_ci_high': 0.69, 'score_ci_low': 0.49, 'num_of_instances': 100}
num_gpu: 1
device: a100_80gb

run_id: 40c00504c660ef9e5ed724582a330bf5d242f3db8a2bed136f45418d5857f2bf
timestamp_utc: 1736452669395
timestamp_day_hour_utc: 1736449200000
model_name_or_path: mistralai_Mistral-7B-Instruct-v0.3
unitxt_card: cards.mmlu.college_chemistry
unitxt_recipe: card=cards.mmlu.college_chemistry,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithTopicHelm.enumerator_numbers_choicesSeparator_comma_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
quantization_type: None
quantization_bit_count: half
inference_runtime_s: 4.491703
generation_args: {"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
model_args: {"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
inference_engine: VLLM
packages_versions: {"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
scores: {'accuracy': 0.43, 'accuracy_ci_low': 0.34, 'accuracy_ci_high': 0.53, 'score_name': 'accuracy', 'score': 0.43, 'score_ci_high': 0.53, 'score_ci_low': 0.34, 'num_of_instances': 100}
num_gpu: 1
device: a100_80gb

run_id: 447232becb8452fa95d6b57fb282bbd5f392638a1611fe76fb7da8857c6f2eb5
timestamp_utc: 1736452689216
timestamp_day_hour_utc: 1736449200000
model_name_or_path: mistralai_Mistral-7B-Instruct-v0.3
unitxt_card: cards.mmlu.professional_law
unitxt_recipe: card=cards.mmlu.professional_law,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithTopic.enumerator_greek_choicesSeparator_newline_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
quantization_type: None
quantization_bit_count: half
inference_runtime_s: 19.265192
generation_args: {"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
model_args: {"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
inference_engine: VLLM
packages_versions: {"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
scores: {'accuracy': 0.43, 'accuracy_ci_low': 0.33, 'accuracy_ci_high': 0.52, 'score_name': 'accuracy', 'score': 0.43, 'score_ci_high': 0.52, 'score_ci_low': 0.33, 'num_of_instances': 100}
num_gpu: 1
device: a100_80gb

run_id: 58c200ca099dac1ac4e55a384a6269d4697e040c908c6ee3a2be11f4d23f1ed8
timestamp_utc: 1736452695239
timestamp_day_hour_utc: 1736449200000
model_name_or_path: mistralai_Mistral-7B-Instruct-v0.3
unitxt_card: cards.mmlu.conceptual_physics
unitxt_recipe: card=cards.mmlu.conceptual_physics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicFixed.enumerator_lowercase_choicesSeparator_OrCapital_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
quantization_type: None
quantization_bit_count: half
inference_runtime_s: 3.079778
generation_args: {"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
model_args: {"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
inference_engine: VLLM
packages_versions: {"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
scores: {'accuracy': 0.37, 'accuracy_ci_low': 0.28, 'accuracy_ci_high': 0.47, 'score_name': 'accuracy', 'score': 0.37, 'score_ci_high': 0.47, 'score_ci_low': 0.28, 'num_of_instances': 100}
num_gpu: 1
device: a100_80gb

run_id: a220a481697dde9d8ccf70421d25456e649875d0c3df2e5e69e24d0ba1e98dfb
timestamp_utc: 1736452701999
timestamp_day_hour_utc: 1736449200000
model_name_or_path: mistralai_Mistral-7B-Instruct-v0.3
unitxt_card: cards.mmlu.electrical_engineering
unitxt_recipe: card=cards.mmlu.electrical_engineering,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSASimple.enumerator_roman_choicesSeparator_comma_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
quantization_type: None
quantization_bit_count: half
inference_runtime_s: 6.244002
generation_args: {"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
model_args: {"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
inference_engine: VLLM
packages_versions: {"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
scores: {'accuracy': 0.49, 'accuracy_ci_low': 0.4, 'accuracy_ci_high': 0.58, 'score_name': 'accuracy', 'score': 0.49, 'score_ci_high': 0.58, 'score_ci_low': 0.4, 'num_of_instances': 100}
num_gpu: 1
device: a100_80gb

run_id: b365d5ece61d3f333c96c06e519c528efd5f7daac44ac577b63c04038567a868
timestamp_utc: 1736452710307
timestamp_day_hour_utc: 1736449200000
model_name_or_path: mistralai_Mistral-7B-Instruct-v0.3
unitxt_card: cards.mmlu.high_school_government_and_politics
unitxt_recipe: card=cards.mmlu.high_school_government_and_politics,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicFixed.enumerator_roman_choicesSeparator_OrCapital_shuffleChoices_placeCorrectChoiceFourth,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
quantization_type: None
quantization_bit_count: half
inference_runtime_s: 7.565355
generation_args: {"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
model_args: {"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
inference_engine: VLLM
packages_versions: {"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
scores: {'accuracy': 0.82, 'accuracy_ci_low': 0.73, 'accuracy_ci_high': 0.89, 'score_name': 'accuracy', 'score': 0.82, 'score_ci_high': 0.89, 'score_ci_low': 0.73, 'num_of_instances': 100}
num_gpu: 1
device: a100_80gb

run_id: e330cf406bcc46544c0c9078c095ed449b551e786eb261a813b250421049ddbb
timestamp_utc: 1736452714546
timestamp_day_hour_utc: 1736449200000
model_name_or_path: mistralai_Mistral-7B-Instruct-v0.3
unitxt_card: cards.mmlu.miscellaneous
unitxt_recipe: card=cards.mmlu.miscellaneous,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithoutTopic.enumerator_greek_choicesSeparator_comma_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
quantization_type: None
quantization_bit_count: half
inference_runtime_s: 3.347974
generation_args: {"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
model_args: {"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
inference_engine: VLLM
packages_versions: {"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
scores: {'accuracy': 0.69, 'accuracy_ci_low': 0.6, 'accuracy_ci_high': 0.77, 'score_name': 'accuracy', 'score': 0.69, 'score_ci_high': 0.77, 'score_ci_low': 0.6, 'num_of_instances': 100}
num_gpu: 1
device: a100_80gb

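Each scores dict reports a point estimate plus a confidence interval over its 100 instances. A minimal sketch of how such an interval could be reproduced with a percentile bootstrap; the resampling count and the bootstrap variant are assumptions, since the records only state the resulting interval:

```python
# Minimal sketch: a percentile bootstrap CI over 100 binary per-instance scores.
# n_boot=1000 and the percentile variant are assumptions, not taken from the records.
import random

def bootstrap_ci(per_instance, n_boot=1000, alpha=0.05, seed=0):
    rng = random.Random(seed)
    n = len(per_instance)
    # Resample with replacement and collect the mean of each replicate.
    means = sorted(
        sum(rng.choices(per_instance, k=n)) / n for _ in range(n_boot)
    )
    return means[int(n_boot * alpha / 2)], means[int(n_boot * (1 - alpha / 2)) - 1]

# e.g. 69 correct out of 100, as in the cards.mmlu.miscellaneous record
print(bootstrap_ci([1] * 69 + [0] * 31))  # roughly (0.60, 0.78)
```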