Dataset Viewer

Schema (from the viewer summary):

- `task_id`: string, 8–69 chars
- `name`: string, 3–64 chars
- `suite`: string, 6 distinct values
- `hf_repo`: string, 125 distinct values
- `hf_subset`: string, 0–55 chars
- `file_path`: string, 6 distinct values
- `line_number`: int64, 36 to 22.8k
- `variable_name`: string, 1 distinct value
- `is_subtask`: bool
- `main_task`: string, 122 distinct values
- `subtask_count`: int64, always 1
- `suites`: list of strings, length 1
- `subtasks`: list of strings, length 1
- `is_standalone`: bool
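As a rough sketch, the summary above corresponds to the following `datasets` feature spec. This is inferred from the viewer display, not an authoritative schema definition.

```python
from datasets import Features, Sequence, Value

# Approximate feature spec inferred from the viewer summary (an assumption).
features = Features({
    "task_id": Value("string"),
    "name": Value("string"),
    "suite": Value("string"),
    "hf_repo": Value("string"),
    "hf_subset": Value("string"),
    "file_path": Value("string"),
    "line_number": Value("int64"),
    "variable_name": Value("string"),   # always "direct_call" in the preview
    "is_subtask": Value("bool"),
    "main_task": Value("string"),
    "subtask_count": Value("int64"),    # always 1
    "suites": Sequence(Value("string")),
    "subtasks": Sequence(Value("string")),
    "is_standalone": Value("bool"),
})
```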
In every preview row, `file_path` is `/Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py`, `variable_name` is `direct_call`, `subtask_count` is 1, `suites` equals `[suite]`, and `subtasks` equals `[task_id]`; those constant or derived columns are folded into this note instead of being repeated in every row.

| task_id | name | suite | hf_repo | hf_subset | line_number | is_subtask | main_task | is_standalone |
|---|---|---|---|---|---|---|---|---|
| lighteval:mmmu_pro:standard-4 | mmmu_pro:standard-4 | lighteval | MMMU/MMMU_pro | standard (4 options) | 36 | true | mmmu_pro | false |
| lighteval:mmmu_pro:standard-10 | mmmu_pro:standard-10 | lighteval | MMMU/MMMU_pro | standard (10 options) | 51 | true | mmmu_pro | false |
| lighteval:mmmu_pro:vision | mmmu_pro:vision | lighteval | MMMU/MMMU_pro | vision | 66 | true | mmmu_pro | false |
| lighteval:bigbench:abstract_narrative_understanding | bigbench:abstract_narrative_understanding | lighteval | tasksource/bigbench | abstract_narrative_understanding | 81 | true | bigbench | false |
| lighteval:agieval:aqua-rat | agieval:aqua-rat | lighteval | dmayhem93/agieval-aqua-rat | default | 96 | true | agieval | false |
| lighteval:agieval:gaokao-biology | agieval:gaokao-biology | lighteval | dmayhem93/agieval-gaokao-biology | default | 114 | true | agieval | false |
| lighteval:agieval:gaokao-chemistry | agieval:gaokao-chemistry | lighteval | dmayhem93/agieval-gaokao-chemistry | default | 132 | true | agieval | false |
| lighteval:agieval:gaokao-chinese | agieval:gaokao-chinese | lighteval | dmayhem93/agieval-gaokao-chinese | default | 150 | true | agieval | false |
| lighteval:agieval:gaokao-english | agieval:gaokao-english | lighteval | dmayhem93/agieval-gaokao-english | default | 168 | true | agieval | false |
| lighteval:agieval:gaokao-geography | agieval:gaokao-geography | lighteval | dmayhem93/agieval-gaokao-geography | default | 186 | true | agieval | false |
| lighteval:agieval:gaokao-history | agieval:gaokao-history | lighteval | dmayhem93/agieval-gaokao-history | default | 204 | true | agieval | false |
| lighteval:agieval:gaokao-mathqa | agieval:gaokao-mathqa | lighteval | dmayhem93/agieval-gaokao-mathqa | default | 222 | true | agieval | false |
| lighteval:agieval:gaokao-physics | agieval:gaokao-physics | lighteval | dmayhem93/agieval-gaokao-physics | default | 240 | true | agieval | false |
| lighteval:agieval:logiqa-en | agieval:logiqa-en | lighteval | dmayhem93/agieval-logiqa-en | default | 258 | true | agieval | false |
| lighteval:agieval:logiqa-zh | agieval:logiqa-zh | lighteval | dmayhem93/agieval-logiqa-zh | default | 276 | true | agieval | false |
| lighteval:agieval:lsat-ar | agieval:lsat-ar | lighteval | dmayhem93/agieval-lsat-ar | default | 294 | true | agieval | false |
| lighteval:agieval:lsat-lr | agieval:lsat-lr | lighteval | dmayhem93/agieval-lsat-lr | default | 312 | true | agieval | false |
| lighteval:agieval:lsat-rc | agieval:lsat-rc | lighteval | dmayhem93/agieval-lsat-rc | default | 330 | true | agieval | false |
| lighteval:agieval:sat-en | agieval:sat-en | lighteval | dmayhem93/agieval-sat-en | default | 348 | true | agieval | false |
| lighteval:agieval:sat-en-without-passage | agieval:sat-en-without-passage | lighteval | dmayhem93/agieval-sat-en-without-passage | default | 366 | true | agieval | false |
| lighteval:agieval:sat-math | agieval:sat-math | lighteval | dmayhem93/agieval-sat-math | default | 384 | true | agieval | false |
| lighteval:aime24 | aime24 | lighteval | HuggingFaceH4/aime_2024 | default | 402 | false | aime24 | true |
| lighteval:aime24_avg | aime24_avg | lighteval | HuggingFaceH4/aime_2024 | default | 416 | false | aime24_avg | true |
| lighteval:aime24_gpassk | aime24_gpassk | lighteval | HuggingFaceH4/aime_2024 | default | 430 | false | aime24_gpassk | true |
| lighteval:aime25 | aime25 | lighteval | yentinglin/aime_2025 | default | 444 | false | aime25 | true |
| lighteval:aime25_gpassk | aime25_gpassk | lighteval | yentinglin/aime_2025 | default | 458 | false | aime25_gpassk | true |
| lighteval:bigbench:anachronisms | bigbench:anachronisms | lighteval | tasksource/bigbench | anachronisms | 472 | true | bigbench | false |
| lighteval:bigbench:analogical_similarity | bigbench:analogical_similarity | lighteval | tasksource/bigbench | analogical_similarity | 487 | true | bigbench | false |
| lighteval:bigbench:analytic_entailment | bigbench:analytic_entailment | lighteval | tasksource/bigbench | analytic_entailment | 502 | true | bigbench | false |
| lighteval:anli:r1 | anli:r1 | lighteval | anli | plain_text | 517 | true | anli | false |
| lighteval:anli:r2 | anli:r2 | lighteval | anli | plain_text | 532 | true | anli | false |
| lighteval:anli:r3 | anli:r3 | lighteval | anli | plain_text | 547 | true | anli | false |
| lighteval:arc_agi_2 | arc_agi_2 | lighteval | arc-agi-community/arc-agi-2 | default | 562 | false | arc_agi_2 | true |
| original:arc:c:letters | arc:c:letters | original | ai2_arc | ARC-Challenge | 577 | true | arc | false |
| original:arc:c:options | arc:c:options | original | ai2_arc | ARC-Challenge | 592 | true | arc | false |
| original:arc:c:simple | arc:c:simple | original | ai2_arc | ARC-Challenge | 610 | true | arc | false |
| leaderboard:arc:challenge | arc:challenge | leaderboard | ai2_arc | ARC-Challenge | 628 | true | arc | false |
| lighteval:arc:easy | arc:easy | lighteval | ai2_arc | ARC-Easy | 646 | true | arc | false |
| lighteval:arithmetic:1dc | arithmetic:1dc | lighteval | EleutherAI/arithmetic | arithmetic_1dc | 664 | true | arithmetic | false |
| lighteval:arithmetic:2da | arithmetic:2da | lighteval | EleutherAI/arithmetic | arithmetic_2da | 679 | true | arithmetic | false |
| lighteval:arithmetic:2dm | arithmetic:2dm | lighteval | EleutherAI/arithmetic | arithmetic_2dm | 694 | true | arithmetic | false |
| lighteval:arithmetic:2ds | arithmetic:2ds | lighteval | EleutherAI/arithmetic | arithmetic_2ds | 709 | true | arithmetic | false |
| lighteval:arithmetic:3da | arithmetic:3da | lighteval | EleutherAI/arithmetic | arithmetic_3da | 724 | true | arithmetic | false |
| lighteval:arithmetic:3ds | arithmetic:3ds | lighteval | EleutherAI/arithmetic | arithmetic_3ds | 739 | true | arithmetic | false |
| lighteval:arithmetic:4da | arithmetic:4da | lighteval | EleutherAI/arithmetic | arithmetic_4da | 754 | true | arithmetic | false |
| lighteval:arithmetic:4ds | arithmetic:4ds | lighteval | EleutherAI/arithmetic | arithmetic_4ds | 769 | true | arithmetic | false |
| lighteval:arithmetic:5da | arithmetic:5da | lighteval | EleutherAI/arithmetic | arithmetic_5da | 784 | true | arithmetic | false |
| lighteval:arithmetic:5ds | arithmetic:5ds | lighteval | EleutherAI/arithmetic | arithmetic_5ds | 799 | true | arithmetic | false |
| lighteval:bigbench:arithmetic_bb | bigbench:arithmetic_bb | lighteval | tasksource/bigbench | arithmetic | 814 | true | bigbench | false |
| lighteval:bigbench:ascii_word_recognition | bigbench:ascii_word_recognition | lighteval | tasksource/bigbench | ascii_word_recognition | 829 | true | bigbench | false |
| lighteval:asdiv | asdiv | lighteval | EleutherAI/asdiv | asdiv | 844 | false | asdiv | true |
| lighteval:bigbench:authorship_verification | bigbench:authorship_verification | lighteval | tasksource/bigbench | authorship_verification | 859 | true | bigbench | false |
| lighteval:bigbench:auto_categorization | bigbench:auto_categorization | lighteval | tasksource/bigbench | auto_categorization | 874 | true | bigbench | false |
| lighteval:bigbench_lite:auto_debugging | bigbench_lite:auto_debugging | lighteval | tasksource/bigbench | auto_debugging | 889 | true | bigbench_lite | false |
| helm:babi_qa | babi_qa | helm | facebook/babi_qa | en-valid-qa1 | 904 | false | babi_qa | true |
| lighteval:bigbench:causal_judgment | bigbench:causal_judgment | lighteval | lighteval/bbh | causal_judgement | 930 | true | bigbench | false |
| lighteval:bigbench:date_understanding | bigbench:date_understanding | lighteval | lighteval/bbh | date_understanding | 945 | true | bigbench | false |
| lighteval:bigbench:disambiguation_qa | bigbench:disambiguation_qa | lighteval | lighteval/bbh | disambiguation_qa | 960 | true | bigbench | false |
| lighteval:bigbench:geometric_shapes | bigbench:geometric_shapes | lighteval | lighteval/bbh | geometric_shapes | 975 | true | bigbench | false |
| lighteval:bigbench:logical_deduction_five_objects | bigbench:logical_deduction_five_objects | lighteval | lighteval/bbh | logical_deduction_five_objects | 990 | true | bigbench | false |
| lighteval:bigbench:logical_deduction_seven_objects | bigbench:logical_deduction_seven_objects | lighteval | lighteval/bbh | logical_deduction_seven_objects | 1005 | true | bigbench | false |
| lighteval:bigbench:logical_deduction_three_objects | bigbench:logical_deduction_three_objects | lighteval | lighteval/bbh | logical_deduction_three_objects | 1020 | true | bigbench | false |
| lighteval:bigbench:movie_recommendation | bigbench:movie_recommendation | lighteval | lighteval/bbh | movie_recommendation | 1035 | true | bigbench | false |
| lighteval:bigbench:navigate | bigbench:navigate | lighteval | lighteval/bbh | navigate | 1050 | true | bigbench | false |
| lighteval:bigbench:reasoning_about_colored_objects | bigbench:reasoning_about_colored_objects | lighteval | lighteval/bbh | reasoning_about_colored_objects | 1065 | true | bigbench | false |
| lighteval:bigbench:ruin_names | bigbench:ruin_names | lighteval | lighteval/bbh | ruin_names | 1080 | true | bigbench | false |
| lighteval:bigbench:salient_translation_error_detection | bigbench:salient_translation_error_detection | lighteval | lighteval/bbh | salient_translation_error_detection | 1095 | true | bigbench | false |
| lighteval:bigbench:snarks | bigbench:snarks | lighteval | lighteval/bbh | snarks | 1110 | true | bigbench | false |
| lighteval:bigbench:sports_understanding | bigbench:sports_understanding | lighteval | lighteval/bbh | sports_understanding | 1125 | true | bigbench | false |
| lighteval:bigbench:temporal_sequences | bigbench:temporal_sequences | lighteval | lighteval/bbh | temporal_sequences | 1140 | true | bigbench | false |
| lighteval:bigbench:tracking_shuffled_objects_five_objects | bigbench:tracking_shuffled_objects_five_objects | lighteval | lighteval/bbh | tracking_shuffled_objects_five_objects | 1155 | true | bigbench | false |
| lighteval:bigbench:tracking_shuffled_objects_seven_objects | bigbench:tracking_shuffled_objects_seven_objects | lighteval | lighteval/bbh | tracking_shuffled_objects_seven_objects | 1170 | true | bigbench | false |
| lighteval:bigbench:tracking_shuffled_objects_three_objects | bigbench:tracking_shuffled_objects_three_objects | lighteval | lighteval/bbh | tracking_shuffled_objects_three_objects | 1185 | true | bigbench | false |
| harness:bigbench:causal_judgment | bigbench:causal_judgment | harness | lighteval/bbh | causal_judgement | 1200 | true | bigbench | false |
| harness:bigbench:date_understanding | bigbench:date_understanding | harness | lighteval/bbh | date_understanding | 1219 | true | bigbench | false |
| harness:bigbench:disambiguation_qa | bigbench:disambiguation_qa | harness | lighteval/bbh | disambiguation_qa | 1238 | true | bigbench | false |
| harness:bigbench:geometric_shapes | bigbench:geometric_shapes | harness | lighteval/bbh | geometric_shapes | 1257 | true | bigbench | false |
| harness:bigbench:logical_deduction_five_objects | bigbench:logical_deduction_five_objects | harness | lighteval/bbh | logical_deduction_five_objects | 1276 | true | bigbench | false |
| harness:bigbench:logical_deduction_seven_objects | bigbench:logical_deduction_seven_objects | harness | lighteval/bbh | logical_deduction_seven_objects | 1295 | true | bigbench | false |
| harness:bigbench:logical_deduction_three_objects | bigbench:logical_deduction_three_objects | harness | lighteval/bbh | logical_deduction_three_objects | 1314 | true | bigbench | false |
| harness:bigbench:movie_recommendation | bigbench:movie_recommendation | harness | lighteval/bbh | movie_recommendation | 1333 | true | bigbench | false |
| harness:bigbench:navigate | bigbench:navigate | harness | lighteval/bbh | navigate | 1352 | true | bigbench | false |
| harness:bigbench:reasoning_about_colored_objects | bigbench:reasoning_about_colored_objects | harness | lighteval/bbh | reasoning_about_colored_objects | 1371 | true | bigbench | false |
| harness:bigbench:ruin_names | bigbench:ruin_names | harness | lighteval/bbh | ruin_names | 1390 | true | bigbench | false |
| harness:bigbench:salient_translation_error_detection | bigbench:salient_translation_error_detection | harness | lighteval/bbh | salient_translation_error_detection | 1409 | true | bigbench | false |
| harness:bigbench:snarks | bigbench:snarks | harness | lighteval/bbh | snarks | 1428 | true | bigbench | false |
| harness:bigbench:sports_understanding | bigbench:sports_understanding | harness | lighteval/bbh | sports_understanding | 1447 | true | bigbench | false |
| harness:bigbench:temporal_sequences | bigbench:temporal_sequences | harness | lighteval/bbh | temporal_sequences | 1466 | true | bigbench | false |
| harness:bigbench:tracking_shuffled_objects_five_objects | bigbench:tracking_shuffled_objects_five_objects | harness | lighteval/bbh | tracking_shuffled_objects_five_objects | 1485 | true | bigbench | false |
| harness:bigbench:tracking_shuffled_objects_seven_objects | bigbench:tracking_shuffled_objects_seven_objects | harness | lighteval/bbh | tracking_shuffled_objects_seven_objects | 1504 | true | bigbench | false |
| harness:bigbench:tracking_shuffled_objects_three_objects | bigbench:tracking_shuffled_objects_three_objects | harness | lighteval/bbh | tracking_shuffled_objects_three_objects | 1523 | true | bigbench | false |
| harness:bbh:boolean_expressions | bbh:boolean_expressions | harness | lukaemon/bbh | boolean_expressions | 1542 | true | bbh | false |
| harness:bbh:causal_judgment | bbh:causal_judgment | harness | lukaemon/bbh | causal_judgement | 1569 | true | bbh | false |
| harness:bbh:date_understanding | bbh:date_understanding | harness | lukaemon/bbh | date_understanding | 1596 | true | bbh | false |
| harness:bbh:disambiguation_qa | bbh:disambiguation_qa | harness | lukaemon/bbh | disambiguation_qa | 1623 | true | bbh | false |
| harness:bbh:dyck_languages | bbh:dyck_languages | harness | lukaemon/bbh | dyck_languages | 1650 | true | bbh | false |
| harness:bbh:formal_fallacies | bbh:formal_fallacies | harness | lukaemon/bbh | formal_fallacies | 1677 | true | bbh | false |
| harness:bbh:geometric_shapes | bbh:geometric_shapes | harness | lukaemon/bbh | geometric_shapes | 1704 | true | bbh | false |
| harness:bbh:hyperbaton | bbh:hyperbaton | harness | lukaemon/bbh | hyperbaton | 1731 | true | bbh | false |
| harness:bbh:logical_deduction_five_objects | bbh:logical_deduction_five_objects | harness | lukaemon/bbh | logical_deduction_five_objects | 1758 | true | bbh | false |
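A minimal sketch of loading and querying this index with the `datasets` library; the repo id and split name below are placeholders, since the dataset's actual Hub path is not shown in this preview.

```python
from datasets import load_dataset

# Placeholder repo id (hypothetical): substitute the dataset's actual Hub path.
# The "train" split name is also an assumption based on the viewer preview.
ds = load_dataset("your-org/lighteval-task-index", split="train")

# Each row registers one lighteval task definition.
row = ds[0]
print(row["task_id"])   # e.g. "lighteval:mmmu_pro:standard-4"
print(row["hf_repo"])   # source dataset on the Hub, e.g. "MMMU/MMMU_pro"

# Separate standalone tasks from subtasks grouped under a main task.
standalone = ds.filter(lambda r: r["is_standalone"])
print(sorted(standalone["main_task"]))
```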