Column schema (as reported by the dataset viewer):

- task_id: string, length 8–69
- name: string, length 3–64
- suite: string, 6 classes
- hf_repo: string, 125 classes
- hf_subset: string, length 0–55
- file_path: string, 6 classes
- line_number: int64, 36–22.8k
- variable_name: string, 1 value
- is_subtask: bool, 2 classes
- main_task: string, 122 classes
- subtask_count: int64, 1–1
- suites: list, length 1
- subtasks: list, length 1
- is_standalone: bool, 2 classes

| task_id | name | suite | hf_repo | hf_subset | file_path | line_number | variable_name | is_subtask | main_task | subtask_count | suites | subtasks | is_standalone |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| harness:bbh:logical_deduction_seven_objects | bbh:logical_deduction_seven_objects | harness | lukaemon/bbh | logical_deduction_seven_objects | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 1,785 | direct_call | true | bbh | 1 | ["harness"] | ["harness:bbh:logical_deduction_seven_objects"] | false |
| harness:bbh:logical_deduction_three_objects | bbh:logical_deduction_three_objects | harness | lukaemon/bbh | logical_deduction_three_objects | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 1,812 | direct_call | true | bbh | 1 | ["harness"] | ["harness:bbh:logical_deduction_three_objects"] | false |
| harness:bbh:movie_recommendation | bbh:movie_recommendation | harness | lukaemon/bbh | movie_recommendation | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 1,839 | direct_call | true | bbh | 1 | ["harness"] | ["harness:bbh:movie_recommendation"] | false |
| harness:bbh:multistep_arithmetic_two | bbh:multistep_arithmetic_two | harness | lukaemon/bbh | multistep_arithmetic_two | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 1,866 | direct_call | true | bbh | 1 | ["harness"] | ["harness:bbh:multistep_arithmetic_two"] | false |
| harness:bbh:navigate | bbh:navigate | harness | lukaemon/bbh | navigate | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 1,893 | direct_call | true | bbh | 1 | ["harness"] | ["harness:bbh:navigate"] | false |
| harness:bbh:object_counting | bbh:object_counting | harness | lukaemon/bbh | object_counting | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 1,920 | direct_call | true | bbh | 1 | ["harness"] | ["harness:bbh:object_counting"] | false |
| harness:bbh:penguins_in_a_table | bbh:penguins_in_a_table | harness | lukaemon/bbh | penguins_in_a_table | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 1,947 | direct_call | true | bbh | 1 | ["harness"] | ["harness:bbh:penguins_in_a_table"] | false |
| harness:bbh:reasoning_about_colored_objects | bbh:reasoning_about_colored_objects | harness | lukaemon/bbh | reasoning_about_colored_objects | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 1,974 | direct_call | true | bbh | 1 | ["harness"] | ["harness:bbh:reasoning_about_colored_objects"] | false |
| harness:bbh:ruin_names | bbh:ruin_names | harness | lukaemon/bbh | ruin_names | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,001 | direct_call | true | bbh | 1 | ["harness"] | ["harness:bbh:ruin_names"] | false |
| harness:bbh:salient_translation_error_detection | bbh:salient_translation_error_detection | harness | lukaemon/bbh | salient_translation_error_detection | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,028 | direct_call | true | bbh | 1 | ["harness"] | ["harness:bbh:salient_translation_error_detection"] | false |
| harness:bbh:snarks | bbh:snarks | harness | lukaemon/bbh | snarks | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,055 | direct_call | true | bbh | 1 | ["harness"] | ["harness:bbh:snarks"] | false |
| harness:bbh:sports_understanding | bbh:sports_understanding | harness | lukaemon/bbh | sports_understanding | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,082 | direct_call | true | bbh | 1 | ["harness"] | ["harness:bbh:sports_understanding"] | false |
| harness:bbh:temporal_sequences | bbh:temporal_sequences | harness | lukaemon/bbh | temporal_sequences | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,109 | direct_call | true | bbh | 1 | ["harness"] | ["harness:bbh:temporal_sequences"] | false |
| harness:bbh:tracking_shuffled_objects_five_objects | bbh:tracking_shuffled_objects_five_objects | harness | lukaemon/bbh | tracking_shuffled_objects_five_objects | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,136 | direct_call | true | bbh | 1 | ["harness"] | ["harness:bbh:tracking_shuffled_objects_five_objects"] | false |
| harness:bbh:tracking_shuffled_objects_seven_objects | bbh:tracking_shuffled_objects_seven_objects | harness | lukaemon/bbh | tracking_shuffled_objects_seven_objects | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,163 | direct_call | true | bbh | 1 | ["harness"] | ["harness:bbh:tracking_shuffled_objects_seven_objects"] | false |
| harness:bbh:tracking_shuffled_objects_three_objects | bbh:tracking_shuffled_objects_three_objects | harness | lukaemon/bbh | tracking_shuffled_objects_three_objects | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,190 | direct_call | true | bbh | 1 | ["harness"] | ["harness:bbh:tracking_shuffled_objects_three_objects"] | false |
| harness:bbh:web_of_lies | bbh:web_of_lies | harness | lukaemon/bbh | web_of_lies | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,217 | direct_call | true | bbh | 1 | ["harness"] | ["harness:bbh:web_of_lies"] | false |
| harness:bbh:word_sorting | bbh:word_sorting | harness | lukaemon/bbh | word_sorting | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,244 | direct_call | true | bbh | 1 | ["harness"] | ["harness:bbh:word_sorting"] | false |
| helm:bbq | bbq | helm | lighteval/bbq_helm | all | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,271 | direct_call | false | bbq | 1 | ["helm"] | ["helm:bbq"] | false |
| helm:bbq:Age | bbq:Age | helm | lighteval/bbq_helm | Age | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,298 | direct_call | true | bbq | 1 | ["helm"] | ["helm:bbq:Age"] | false |
| helm:bbq:Disability_status | bbq:Disability_status | helm | lighteval/bbq_helm | Disability_status | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,325 | direct_call | true | bbq | 1 | ["helm"] | ["helm:bbq:Disability_status"] | false |
| helm:bbq:Gender_identity | bbq:Gender_identity | helm | lighteval/bbq_helm | Gender_identity | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,352 | direct_call | true | bbq | 1 | ["helm"] | ["helm:bbq:Gender_identity"] | false |
| helm:bbq:Nationality | bbq:Nationality | helm | lighteval/bbq_helm | Nationality | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,379 | direct_call | true | bbq | 1 | ["helm"] | ["helm:bbq:Nationality"] | false |
| helm:bbq:Physical_appearance | bbq:Physical_appearance | helm | lighteval/bbq_helm | Physical_appearance | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,406 | direct_call | true | bbq | 1 | ["helm"] | ["helm:bbq:Physical_appearance"] | false |
| helm:bbq:Race_ethnicity | bbq:Race_ethnicity | helm | lighteval/bbq_helm | Race_ethnicity | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,433 | direct_call | true | bbq | 1 | ["helm"] | ["helm:bbq:Race_ethnicity"] | false |
| helm:bbq:Race_x_SES | bbq:Race_x_SES | helm | lighteval/bbq_helm | Race_x_SES | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,460 | direct_call | true | bbq | 1 | ["helm"] | ["helm:bbq:Race_x_SES"] | false |
| helm:bbq:Race_x_gender | bbq:Race_x_gender | helm | lighteval/bbq_helm | Race_x_gender | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,487 | direct_call | true | bbq | 1 | ["helm"] | ["helm:bbq:Race_x_gender"] | false |
| helm:bbq:Religion | bbq:Religion | helm | lighteval/bbq_helm | Religion | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,514 | direct_call | true | bbq | 1 | ["helm"] | ["helm:bbq:Religion"] | false |
| helm:bbq:SES | bbq:SES | helm | lighteval/bbq_helm | SES | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,541 | direct_call | true | bbq | 1 | ["helm"] | ["helm:bbq:SES"] | false |
| helm:bbq:Sexual_orientation | bbq:Sexual_orientation | helm | lighteval/bbq_helm | Sexual_orientation | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,568 | direct_call | true | bbq | 1 | ["helm"] | ["helm:bbq:Sexual_orientation"] | false |
| lighteval:bigbench_lite:bbq_lite_json | bigbench_lite:bbq_lite_json | lighteval | tasksource/bigbench | bbq_lite_json | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,595 | direct_call | true | bigbench_lite | 1 | ["lighteval"] | ["lighteval:bigbench_lite:bbq_lite_json"] | false |
| helm:bigbench:auto_debugging | bigbench:auto_debugging | helm | lighteval/bigbench_helm | auto_debugging | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,610 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:auto_debugging"] | false |
| helm:bigbench:bbq_lite_json:age_ambig | bigbench:bbq_lite_json:age_ambig | helm | lighteval/bigbench_helm | bbq_lite_json-age_ambig | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,628 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:bbq_lite_json:age_ambig"] | false |
| helm:bigbench:bbq_lite_json:age_disambig | bigbench:bbq_lite_json:age_disambig | helm | lighteval/bigbench_helm | bbq_lite_json-age_disambig | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,654 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:bbq_lite_json:age_disambig"] | false |
| helm:bigbench:bbq_lite_json:disability_status_ambig | bigbench:bbq_lite_json:disability_status_ambig | helm | lighteval/bigbench_helm | bbq_lite_json-disability_status_ambig | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,680 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:bbq_lite_json:disability_status_ambig"] | false |
| helm:bigbench:bbq_lite_json:disability_status_disambig | bigbench:bbq_lite_json:disability_status_disambig | helm | lighteval/bigbench_helm | bbq_lite_json-disability_status_disambig | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,706 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:bbq_lite_json:disability_status_disambig"] | false |
| helm:bigbench:bbq_lite_json:gender_identity_ambig | bigbench:bbq_lite_json:gender_identity_ambig | helm | lighteval/bigbench_helm | bbq_lite_json-gender_identity_ambig | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,732 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:bbq_lite_json:gender_identity_ambig"] | false |
| helm:bigbench:bbq_lite_json:gender_identity_disambig | bigbench:bbq_lite_json:gender_identity_disambig | helm | lighteval/bigbench_helm | bbq_lite_json-gender_identity_disambig | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,758 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:bbq_lite_json:gender_identity_disambig"] | false |
| helm:bigbench:bbq_lite_json:nationality_ambig | bigbench:bbq_lite_json:nationality_ambig | helm | lighteval/bigbench_helm | bbq_lite_json-nationality_ambig | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,784 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:bbq_lite_json:nationality_ambig"] | false |
| helm:bigbench:bbq_lite_json:nationality_disambig | bigbench:bbq_lite_json:nationality_disambig | helm | lighteval/bigbench_helm | bbq_lite_json-nationality_disambig | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,810 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:bbq_lite_json:nationality_disambig"] | false |
| helm:bigbench:bbq_lite_json:physical_appearance_ambig | bigbench:bbq_lite_json:physical_appearance_ambig | helm | lighteval/bigbench_helm | bbq_lite_json-physical_appearance_ambig | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,836 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:bbq_lite_json:physical_appearance_ambig"] | false |
| helm:bigbench:bbq_lite_json:physical_appearance_disambig | bigbench:bbq_lite_json:physical_appearance_disambig | helm | lighteval/bigbench_helm | bbq_lite_json-physical_appearance_disambig | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,862 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:bbq_lite_json:physical_appearance_disambig"] | false |
| helm:bigbench:bbq_lite_json:race_ethnicity_ambig | bigbench:bbq_lite_json:race_ethnicity_ambig | helm | lighteval/bigbench_helm | bbq_lite_json-race_ethnicity_ambig | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,888 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:bbq_lite_json:race_ethnicity_ambig"] | false |
| helm:bigbench:bbq_lite_json:race_ethnicity_disambig | bigbench:bbq_lite_json:race_ethnicity_disambig | helm | lighteval/bigbench_helm | bbq_lite_json-race_ethnicity_disambig | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,914 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:bbq_lite_json:race_ethnicity_disambig"] | false |
| helm:bigbench:bbq_lite_json:religion_ambig | bigbench:bbq_lite_json:religion_ambig | helm | lighteval/bigbench_helm | bbq_lite_json-religion_ambig | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,940 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:bbq_lite_json:religion_ambig"] | false |
| helm:bigbench:bbq_lite_json:religion_disambig | bigbench:bbq_lite_json:religion_disambig | helm | lighteval/bigbench_helm | bbq_lite_json-religion_disambig | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,966 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:bbq_lite_json:religion_disambig"] | false |
| helm:bigbench:bbq_lite_json:ses_ambig | bigbench:bbq_lite_json:ses_ambig | helm | lighteval/bigbench_helm | bbq_lite_json-ses_ambig | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 2,992 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:bbq_lite_json:ses_ambig"] | false |
| helm:bigbench:bbq_lite_json:ses_disambig | bigbench:bbq_lite_json:ses_disambig | helm | lighteval/bigbench_helm | bbq_lite_json-ses_disambig | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,018 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:bbq_lite_json:ses_disambig"] | false |
| helm:bigbench:bbq_lite_json:sexual_orientation_ambig | bigbench:bbq_lite_json:sexual_orientation_ambig | helm | lighteval/bigbench_helm | bbq_lite_json-sexual_orientation_ambig | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,044 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:bbq_lite_json:sexual_orientation_ambig"] | false |
| helm:bigbench:bbq_lite_json:sexual_orientation_disambig | bigbench:bbq_lite_json:sexual_orientation_disambig | helm | lighteval/bigbench_helm | bbq_lite_json-sexual_orientation_disambig | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,070 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:bbq_lite_json:sexual_orientation_disambig"] | false |
| helm:bigbench:code_line_description | bigbench:code_line_description | helm | lighteval/bigbench_helm | code_line_description | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,096 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:code_line_description"] | false |
| helm:bigbench:conceptual_combinations:contradictions | bigbench:conceptual_combinations:contradictions | helm | lighteval/bigbench_helm | conceptual_combinations-contradictions | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,122 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:conceptual_combinations:contradictions"] | false |
| helm:bigbench:conceptual_combinations:emergent_properties | bigbench:conceptual_combinations:emergent_properties | helm | lighteval/bigbench_helm | conceptual_combinations-emergent_properties | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,148 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:conceptual_combinations:emergent_properties"] | false |
| helm:bigbench:conceptual_combinations:fanciful_fictional_combinations | bigbench:conceptual_combinations:fanciful_fictional_combinations | helm | lighteval/bigbench_helm | conceptual_combinations-fanciful_fictional_combinations | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,174 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:conceptual_combinations:fanciful_fictional_combinations"] | false |
| helm:bigbench:conceptual_combinations:homonyms | bigbench:conceptual_combinations:homonyms | helm | lighteval/bigbench_helm | conceptual_combinations-homonyms | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,200 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:conceptual_combinations:homonyms"] | false |
| helm:bigbench:conceptual_combinations:invented_words | bigbench:conceptual_combinations:invented_words | helm | lighteval/bigbench_helm | conceptual_combinations-invented_words | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,226 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:conceptual_combinations:invented_words"] | false |
| helm:bigbench:conlang_translation:adna_from | bigbench:conlang_translation:adna_from | helm | lighteval/bigbench_helm | conlang_translation-adna_from | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,252 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:conlang_translation:adna_from"] | false |
| helm:bigbench:conlang_translation:adna_to | bigbench:conlang_translation:adna_to | helm | lighteval/bigbench_helm | conlang_translation-adna_to | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,267 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:conlang_translation:adna_to"] | false |
| helm:bigbench:conlang_translation:atikampe_from | bigbench:conlang_translation:atikampe_from | helm | lighteval/bigbench_helm | conlang_translation-atikampe_from | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,282 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:conlang_translation:atikampe_from"] | false |
| helm:bigbench:conlang_translation:atikampe_to | bigbench:conlang_translation:atikampe_to | helm | lighteval/bigbench_helm | conlang_translation-atikampe_to | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,297 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:conlang_translation:atikampe_to"] | false |
| helm:bigbench:conlang_translation:gornam_from | bigbench:conlang_translation:gornam_from | helm | lighteval/bigbench_helm | conlang_translation-gornam_from | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,312 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:conlang_translation:gornam_from"] | false |
| helm:bigbench:conlang_translation:gornam_to | bigbench:conlang_translation:gornam_to | helm | lighteval/bigbench_helm | conlang_translation-gornam_to | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,327 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:conlang_translation:gornam_to"] | false |
| helm:bigbench:conlang_translation:holuan_from | bigbench:conlang_translation:holuan_from | helm | lighteval/bigbench_helm | conlang_translation-holuan_from | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,342 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:conlang_translation:holuan_from"] | false |
| helm:bigbench:conlang_translation:holuan_to | bigbench:conlang_translation:holuan_to | helm | lighteval/bigbench_helm | conlang_translation-holuan_to | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,357 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:conlang_translation:holuan_to"] | false |
| helm:bigbench:conlang_translation:mkafala_from | bigbench:conlang_translation:mkafala_from | helm | lighteval/bigbench_helm | conlang_translation-mkafala_from | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,372 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:conlang_translation:mkafala_from"] | false |
| helm:bigbench:conlang_translation:mkafala_to | bigbench:conlang_translation:mkafala_to | helm | lighteval/bigbench_helm | conlang_translation-mkafala_to | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,387 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:conlang_translation:mkafala_to"] | false |
| helm:bigbench:conlang_translation:postpositive_english_from | bigbench:conlang_translation:postpositive_english_from | helm | lighteval/bigbench_helm | conlang_translation-postpositive_english_from | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,402 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:conlang_translation:postpositive_english_from"] | false |
| helm:bigbench:conlang_translation:postpositive_english_to | bigbench:conlang_translation:postpositive_english_to | helm | lighteval/bigbench_helm | conlang_translation-postpositive_english_to | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,417 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:conlang_translation:postpositive_english_to"] | false |
| helm:bigbench:conlang_translation:unapuri_from | bigbench:conlang_translation:unapuri_from | helm | lighteval/bigbench_helm | conlang_translation-unapuri_from | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,432 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:conlang_translation:unapuri_from"] | false |
| helm:bigbench:conlang_translation:unapuri_to | bigbench:conlang_translation:unapuri_to | helm | lighteval/bigbench_helm | conlang_translation-unapuri_to | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,447 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:conlang_translation:unapuri_to"] | false |
| helm:bigbench:conlang_translation:vaomi_from | bigbench:conlang_translation:vaomi_from | helm | lighteval/bigbench_helm | conlang_translation-vaomi_from | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,462 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:conlang_translation:vaomi_from"] | false |
| helm:bigbench:conlang_translation:vaomi_to | bigbench:conlang_translation:vaomi_to | helm | lighteval/bigbench_helm | conlang_translation-vaomi_to | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,477 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:conlang_translation:vaomi_to"] | false |
| helm:bigbench:emoji_movie | bigbench:emoji_movie | helm | lighteval/bigbench_helm | emoji_movie | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,492 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:emoji_movie"] | false |
| helm:bigbench:formal_fallacies_syllogisms_negation | bigbench:formal_fallacies_syllogisms_negation | helm | lighteval/bigbench_helm | formal_fallacies_syllogisms_negation | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,518 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:formal_fallacies_syllogisms_negation"] | false |
| helm:bigbench:hindu_knowledge | bigbench:hindu_knowledge | helm | lighteval/bigbench_helm | hindu_knowledge | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,544 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:hindu_knowledge"] | false |
| helm:bigbench:known_unknowns | bigbench:known_unknowns | helm | lighteval/bigbench_helm | known_unknowns | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,570 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:known_unknowns"] | false |
| helm:bigbench:language_identification | bigbench:language_identification | helm | lighteval/bigbench_helm | language_identification | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,596 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:language_identification"] | false |
| helm:bigbench:linguistics_puzzles | bigbench:linguistics_puzzles | helm | lighteval/bigbench_helm | linguistics_puzzles | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,622 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:linguistics_puzzles"] | false |
| helm:bigbench:logic_grid_puzzle | bigbench:logic_grid_puzzle | helm | lighteval/bigbench_helm | logic_grid_puzzle | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,640 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:logic_grid_puzzle"] | false |
| helm:bigbench:logical_deduction-five_objects | bigbench:logical_deduction-five_objects | helm | lighteval/bigbench_helm | logical_deduction-five_objects | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,666 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:logical_deduction-five_objects"] | false |
| helm:bigbench:logical_deduction-seven_objects | bigbench:logical_deduction-seven_objects | helm | lighteval/bigbench_helm | logical_deduction-seven_objects | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,692 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:logical_deduction-seven_objects"] | false |
| helm:bigbench:logical_deduction-three_objects | bigbench:logical_deduction-three_objects | helm | lighteval/bigbench_helm | logical_deduction-three_objects | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,718 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:logical_deduction-three_objects"] | false |
| helm:bigbench:misconceptions_russian | bigbench:misconceptions_russian | helm | lighteval/bigbench_helm | misconceptions_russian | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,744 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:misconceptions_russian"] | false |
| helm:bigbench:novel_concepts | bigbench:novel_concepts | helm | lighteval/bigbench_helm | novel_concepts | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,770 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:novel_concepts"] | false |
| helm:bigbench:operators | bigbench:operators | helm | lighteval/bigbench_helm | operators | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,796 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:operators"] | false |
| helm:bigbench:parsinlu_reading_comprehension | bigbench:parsinlu_reading_comprehension | helm | lighteval/bigbench_helm | parsinlu_reading_comprehension | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,814 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:parsinlu_reading_comprehension"] | false |
| helm:bigbench:play_dialog_same_or_different | bigbench:play_dialog_same_or_different | helm | lighteval/bigbench_helm | play_dialog_same_or_different | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,832 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:play_dialog_same_or_different"] | false |
| helm:bigbench:repeat_copy_logic | bigbench:repeat_copy_logic | helm | lighteval/bigbench_helm | repeat_copy_logic | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,858 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:repeat_copy_logic"] | false |
| helm:bigbench:strange_stories-boolean | bigbench:strange_stories-boolean | helm | lighteval/bigbench_helm | strange_stories-boolean | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,876 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:strange_stories-boolean"] | false |
| helm:bigbench:strange_stories-multiple_choice | bigbench:strange_stories-multiple_choice | helm | lighteval/bigbench_helm | strange_stories-multiple_choice | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,902 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:strange_stories-multiple_choice"] | false |
| helm:bigbench:strategyqa | bigbench:strategyqa | helm | lighteval/bigbench_helm | strategyqa | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,928 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:strategyqa"] | false |
| helm:bigbench:symbol_interpretation-adversarial | bigbench:symbol_interpretation-adversarial | helm | lighteval/bigbench_helm | symbol_interpretation-adversarial | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,954 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:symbol_interpretation-adversarial"] | false |
| helm:bigbench:symbol_interpretation-emoji_agnostic | bigbench:symbol_interpretation-emoji_agnostic | helm | lighteval/bigbench_helm | symbol_interpretation-emoji_agnostic | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 3,980 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:symbol_interpretation-emoji_agnostic"] | false |
| helm:bigbench:symbol_interpretation-name_agnostic | bigbench:symbol_interpretation-name_agnostic | helm | lighteval/bigbench_helm | symbol_interpretation-name_agnostic | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 4,006 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:symbol_interpretation-name_agnostic"] | false |
| helm:bigbench:symbol_interpretation-plain | bigbench:symbol_interpretation-plain | helm | lighteval/bigbench_helm | symbol_interpretation-plain | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 4,032 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:symbol_interpretation-plain"] | false |
| helm:bigbench:symbol_interpretation-tricky | bigbench:symbol_interpretation-tricky | helm | lighteval/bigbench_helm | symbol_interpretation-tricky | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 4,058 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:symbol_interpretation-tricky"] | false |
| helm:bigbench:vitaminc_fact_verification | bigbench:vitaminc_fact_verification | helm | lighteval/bigbench_helm | vitaminc_fact_verification | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 4,084 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:vitaminc_fact_verification"] | false |
| helm:bigbench:winowhy | bigbench:winowhy | helm | lighteval/bigbench_helm | winowhy | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 4,110 | direct_call | true | bigbench | 1 | ["helm"] | ["helm:bigbench:winowhy"] | false |
| lighteval:blimp:adjunct_island | blimp:adjunct_island | lighteval | blimp | adjunct_island | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 4,136 | direct_call | true | blimp | 1 | ["lighteval"] | ["lighteval:blimp:adjunct_island"] | false |
| helm:blimp:adjunct_island | blimp:adjunct_island | helm | blimp | adjunct_island | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 4,151 | direct_call | true | blimp | 1 | ["helm"] | ["helm:blimp:adjunct_island"] | false |
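Each row's hf_repo and hf_subset columns identify the Hugging Face dataset a task is built from, and task_id follows the pattern suite:main_task[:subset]. The snippet below is a minimal sketch of turning one row into a loaded dataset; it assumes the `datasets` library is installed and that the listed Hub repos (e.g. lukaemon/bbh) are publicly accessible, and it does not assume any particular split name.

```python
# Minimal sketch: resolve one table row above to its source dataset.
# Assumes the `datasets` library is installed and the listed Hub repos
# (e.g. lukaemon/bbh) are publicly available.
from datasets import load_dataset

# Example row: task_id "harness:bbh:navigate" -> hf_repo "lukaemon/bbh", hf_subset "navigate"
row = {
    "task_id": "harness:bbh:navigate",
    "hf_repo": "lukaemon/bbh",
    "hf_subset": "navigate",
}

# task_id is structured as suite:main_task[:subset]
suite, *task_parts = row["task_id"].split(":")
print(f"suite={suite}, task={':'.join(task_parts)}")

# Load the underlying dataset; pass hf_subset as the config name when it is non-empty.
ds = load_dataset(row["hf_repo"], row["hf_subset"] or None)

first_split = next(iter(ds))            # don't assume a specific split name
print(first_split, ds[first_split][0])  # inspect the first example
```

The same pattern applies to the helm rows, whose lighteval/bbq_helm and lighteval/bigbench_helm repos likewise carry the per-task config name in hf_subset.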