datasetId
stringlengths 2
117
| card
stringlengths 19
1.01M
|
---|---|
ChrisWilson/twitter_dataset_1712508023 | ---
dataset_info:
features:
- name: id
dtype: string
- name: tweet_content
dtype: string
- name: user_name
dtype: string
- name: user_id
dtype: string
- name: created_at
dtype: string
- name: url
dtype: string
- name: favourite_count
dtype: int64
- name: scraped_at
dtype: string
- name: image_urls
dtype: string
splits:
- name: train
num_bytes: 10705
num_examples: 32
download_size: 11885
dataset_size: 10705
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
danjacobellis/aria_ea_rgb_25k | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': loc1_script1_seq1_rec1
'1': loc1_script1_seq3_rec1
'2': loc1_script1_seq5_rec1
'3': loc1_script1_seq6_rec1
'4': loc1_script1_seq7_rec1
'5': loc1_script2_seq1_rec1
'6': loc1_script2_seq1_rec2
'7': loc1_script2_seq3_rec1
'8': loc1_script2_seq3_rec2
'9': loc1_script2_seq4_rec1
'10': loc1_script2_seq4_rec2
'11': loc1_script2_seq6_rec1
'12': loc1_script2_seq6_rec2
'13': loc1_script2_seq7_rec1
'14': loc1_script2_seq8_rec1
'15': loc1_script2_seq8_rec2
'16': loc1_script3_seq1_rec1
'17': loc1_script3_seq2_rec1
'18': loc1_script3_seq5_rec1
'19': loc1_script4_seq2_rec1
'20': loc1_script4_seq3_rec1
'21': loc1_script4_seq4_rec1
'22': loc1_script4_seq5_rec1
'23': loc1_script5_seq1_rec1
'24': loc1_script5_seq2_rec1
'25': loc1_script5_seq3_rec1
'26': loc1_script5_seq5_rec1
'27': loc1_script5_seq6_rec1
'28': loc2_script1_seq1_rec1
'29': loc2_script1_seq2_rec1
'30': loc2_script1_seq3_rec1
'31': loc2_script1_seq4_rec1
'32': loc2_script1_seq5_rec1
'33': loc2_script1_seq6_rec1
'34': loc2_script1_seq7_rec1
'35': loc2_script2_seq1_rec1
'36': loc2_script2_seq1_rec2
'37': loc2_script2_seq2_rec1
'38': loc2_script2_seq2_rec2
'39': loc2_script2_seq3_rec1
'40': loc2_script2_seq3_rec2
'41': loc2_script2_seq4_rec1
'42': loc2_script2_seq4_rec2
'43': loc2_script2_seq5_rec1
'44': loc2_script2_seq5_rec2
'45': loc2_script2_seq6_rec1
'46': loc2_script2_seq6_rec2
'47': loc2_script2_seq8_rec1
'48': loc2_script2_seq8_rec2
'49': loc2_script3_seq1_rec1
'50': loc2_script3_seq1_rec2
'51': loc2_script3_seq2_rec1
'52': loc2_script3_seq2_rec2
'53': loc2_script3_seq3_rec1
'54': loc2_script3_seq3_rec2
'55': loc2_script3_seq4_rec1
'56': loc2_script3_seq4_rec2
'57': loc2_script3_seq5_rec1
'58': loc2_script3_seq5_rec2
'59': loc2_script4_seq3_rec1
'60': loc2_script4_seq4_rec1
'61': loc2_script4_seq5_rec1
'62': loc2_script4_seq7_rec1
'63': loc2_script5_seq1_rec1
'64': loc2_script5_seq2_rec1
'65': loc2_script5_seq3_rec1
'66': loc2_script5_seq4_rec1
'67': loc2_script5_seq5_rec1
'68': loc2_script5_seq6_rec1
'69': loc2_script5_seq7_rec1
'70': loc3_script1_seq1_rec1
'71': loc3_script1_seq2_rec1
'72': loc3_script1_seq3_rec1
'73': loc3_script1_seq4_rec1
'74': loc3_script1_seq5_rec1
'75': loc3_script1_seq6_rec1
'76': loc3_script1_seq7_rec1
'77': loc3_script2_seq1_rec1
'78': loc3_script2_seq1_rec2
'79': loc3_script2_seq2_rec1
'80': loc3_script2_seq3_rec1
'81': loc3_script2_seq3_rec2
'82': loc3_script2_seq4_rec1
'83': loc3_script2_seq4_rec2
'84': loc3_script2_seq5_rec1
'85': loc3_script2_seq5_rec2
'86': loc3_script2_seq7_rec1
'87': loc3_script2_seq7_rec2
'88': loc3_script3_seq1_rec1
'89': loc3_script3_seq1_rec2
'90': loc3_script3_seq2_rec1
'91': loc3_script3_seq2_rec2
'92': loc3_script3_seq4_rec1
'93': loc3_script3_seq4_rec2
'94': loc3_script3_seq5_rec1
'95': loc3_script3_seq5_rec2
'96': loc3_script4_seq2_rec1
'97': loc3_script4_seq3_rec1
'98': loc3_script4_seq4_rec1
'99': loc3_script4_seq5_rec1
'100': loc3_script4_seq7_rec1
'101': loc3_script5_seq1_rec1
'102': loc3_script5_seq2_rec1
'103': loc3_script5_seq3_rec1
'104': loc3_script5_seq4_rec1
'105': loc3_script5_seq5_rec1
'106': loc3_script5_seq6_rec1
'107': loc3_script5_seq7_rec1
'108': loc4_script1_seq1_rec1
'109': loc4_script1_seq3_rec1
'110': loc4_script1_seq5_rec1
'111': loc4_script1_seq6_rec1
'112': loc4_script2_seq1_rec2
'113': loc4_script2_seq2_rec1
'114': loc4_script2_seq3_rec2
'115': loc4_script2_seq4_rec1
'116': loc4_script2_seq6_rec1
'117': loc4_script2_seq7_rec1
'118': loc4_script2_seq8_rec2
'119': loc4_script3_seq1_rec2
'120': loc4_script3_seq2_rec2
'121': loc4_script3_seq3_rec1
'122': loc4_script3_seq4_rec1
'123': loc4_script4_seq2_rec1
'124': loc4_script5_seq1_rec1
'125': loc4_script5_seq3_rec1
'126': loc4_script5_seq7_rec1
'127': loc5_script4_seq1_rec1
'128': loc5_script4_seq2_rec1
'129': loc5_script4_seq3_rec1
'130': loc5_script4_seq4_rec1
'131': loc5_script4_seq5_rec1
'132': loc5_script4_seq6_rec1
'133': loc5_script5_seq1_rec1
'134': loc5_script5_seq2_rec1
'135': loc5_script5_seq3_rec1
'136': loc5_script5_seq4_rec1
'137': loc5_script5_seq5_rec1
'138': loc5_script5_seq6_rec1
'139': loc5_script5_seq7_rec1
splits:
- name: train
num_bytes: 11215580059.518394
num_examples: 25500
download_size: 11096081791
dataset_size: 11215580059.518394
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
StellarHouse/RequestDisect | ---
license: mit
---
|
atmallen/amazon_polarity_embeddings_random1 | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
dataset_info:
features:
- name: content
dtype: string
- name: label
dtype:
class_label:
names:
'0': neg
'1': pos
- name: embedding
sequence: float32
- name: title
dtype: string
splits:
- name: train
num_bytes: 7148364432
num_examples: 3600000
- name: test
num_bytes: 19940712
num_examples: 10000
download_size: 3902806188
dataset_size: 7168305144
---
# Dataset Card for "amazon_polarity_embeddings_random1"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
CyberHarem/ahagon_umiko_newgame | ---
license: mit
task_categories:
- text-to-image
tags:
- art
- not-for-all-audiences
size_categories:
- n<1K
---
# Dataset of Ahagon Umiko
This is the dataset of Ahagon Umiko, containing 223 images and their tags.
Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)).
| Name | Images | Download | Description |
|:----------------|---------:|:----------------------------------------|:-----------------------------------------------------------------------------------------|
| raw | 223 | [Download](dataset-raw.zip) | Raw data with meta information. |
| raw-stage3 | 519 | [Download](dataset-raw-stage3.zip) | 3-stage cropped raw data with meta information. |
| raw-stage3-eyes | 561 | [Download](dataset-raw-stage3-eyes.zip) | 3-stage cropped (with eye-focus) raw data with meta information. |
| 384x512 | 223 | [Download](dataset-384x512.zip) | 384x512 aligned dataset. |
| 512x704 | 223 | [Download](dataset-512x704.zip) | 512x704 aligned dataset. |
| 640x880 | 223 | [Download](dataset-640x880.zip) | 640x880 aligned dataset. |
| stage3-640 | 519 | [Download](dataset-stage3-640.zip) | 3-stage cropped dataset with the shorter side not exceeding 640 pixels. |
| stage3-800 | 519 | [Download](dataset-stage3-800.zip) | 3-stage cropped dataset with the shorter side not exceeding 800 pixels. |
| stage3-p512-640 | 459 | [Download](dataset-stage3-p512-640.zip) | 3-stage cropped dataset with the area not less than 512x512 pixels. |
| stage3-eyes-640 | 561 | [Download](dataset-stage3-eyes-640.zip) | 3-stage cropped (with eye-focus) dataset with the shorter side not exceeding 640 pixels. |
| stage3-eyes-800 | 561 | [Download](dataset-stage3-eyes-800.zip) | 3-stage cropped (with eye-focus) dataset with the shorter side not exceeding 800 pixels. |
|
hhhaaahhhaa/text-guided-vc-google-tts-api-speech_tokenizer | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
dataset_info:
features:
- name: file_id
dtype: string
- name: instruction
dtype: string
- name: transcription
dtype: string
- name: src_speech_tokenizer_0
sequence: int64
- name: src_speech_tokenizer_1
sequence: int64
- name: src_speech_tokenizer_2
sequence: int64
- name: src_speech_tokenizer_3
sequence: int64
- name: src_speech_tokenizer_4
sequence: int64
- name: src_speech_tokenizer_5
sequence: int64
- name: src_speech_tokenizer_6
sequence: int64
- name: src_speech_tokenizer_7
sequence: int64
- name: tgt_speech_tokenizer_0
sequence: int64
- name: tgt_speech_tokenizer_1
sequence: int64
- name: tgt_speech_tokenizer_2
sequence: int64
- name: tgt_speech_tokenizer_3
sequence: int64
- name: tgt_speech_tokenizer_4
sequence: int64
- name: tgt_speech_tokenizer_5
sequence: int64
- name: tgt_speech_tokenizer_6
sequence: int64
- name: tgt_speech_tokenizer_7
sequence: int64
splits:
- name: train
num_bytes: 2476215704
num_examples: 90000
- name: validation
num_bytes: 135757316
num_examples: 5000
- name: test
num_bytes: 139761511
num_examples: 5000
download_size: 147633674
dataset_size: 2751734531
---
# Dataset Card for "text-guided-vc-google-tts-api-speech_tokenizer"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
open-llm-leaderboard/details_abhishekchohan__mistral-7B-forest-merge-v0.1 | ---
pretty_name: Evaluation run of abhishekchohan/mistral-7B-forest-merge-v0.1
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [abhishekchohan/mistral-7B-forest-merge-v0.1](https://huggingface.co/abhishekchohan/mistral-7B-forest-merge-v0.1)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_abhishekchohan__mistral-7B-forest-merge-v0.1\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2024-01-22T17:22:14.145358](https://huggingface.co/datasets/open-llm-leaderboard/details_abhishekchohan__mistral-7B-forest-merge-v0.1/blob/main/results_2024-01-22T17-22-14.145358.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6025264269069641,\n\
\ \"acc_stderr\": 0.032921649449251675,\n \"acc_norm\": 0.6050419928736916,\n\
\ \"acc_norm_stderr\": 0.033582448395703776,\n \"mc1\": 0.423500611995104,\n\
\ \"mc1_stderr\": 0.01729742144853473,\n \"mc2\": 0.5852690107055646,\n\
\ \"mc2_stderr\": 0.01561479793889522\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.6040955631399317,\n \"acc_stderr\": 0.014291228393536588,\n\
\ \"acc_norm\": 0.6279863481228669,\n \"acc_norm_stderr\": 0.014124597881844461\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6521609241187014,\n\
\ \"acc_stderr\": 0.0047531124327286995,\n \"acc_norm\": 0.8431587333200558,\n\
\ \"acc_norm_stderr\": 0.0036290784658809796\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \
\ \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5703703703703704,\n\
\ \"acc_stderr\": 0.042763494943765995,\n \"acc_norm\": 0.5703703703703704,\n\
\ \"acc_norm_stderr\": 0.042763494943765995\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.6513157894736842,\n \"acc_stderr\": 0.0387813988879761,\n\
\ \"acc_norm\": 0.6513157894736842,\n \"acc_norm_stderr\": 0.0387813988879761\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.62,\n\
\ \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.62,\n \
\ \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.6679245283018868,\n \"acc_stderr\": 0.02898545565233439,\n\
\ \"acc_norm\": 0.6679245283018868,\n \"acc_norm_stderr\": 0.02898545565233439\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6736111111111112,\n\
\ \"acc_stderr\": 0.03921067198982266,\n \"acc_norm\": 0.6736111111111112,\n\
\ \"acc_norm_stderr\": 0.03921067198982266\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.41,\n \"acc_stderr\": 0.04943110704237102,\n \
\ \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.04943110704237102\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.47,\n\
\ \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \
\ \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6069364161849711,\n\
\ \"acc_stderr\": 0.0372424959581773,\n \"acc_norm\": 0.6069364161849711,\n\
\ \"acc_norm_stderr\": 0.0372424959581773\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.3431372549019608,\n \"acc_stderr\": 0.04724007352383888,\n\
\ \"acc_norm\": 0.3431372549019608,\n \"acc_norm_stderr\": 0.04724007352383888\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.77,\n \"acc_stderr\": 0.04229525846816507,\n \"acc_norm\": 0.77,\n\
\ \"acc_norm_stderr\": 0.04229525846816507\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.5319148936170213,\n \"acc_stderr\": 0.03261936918467382,\n\
\ \"acc_norm\": 0.5319148936170213,\n \"acc_norm_stderr\": 0.03261936918467382\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.38596491228070173,\n\
\ \"acc_stderr\": 0.04579639422070434,\n \"acc_norm\": 0.38596491228070173,\n\
\ \"acc_norm_stderr\": 0.04579639422070434\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5103448275862069,\n \"acc_stderr\": 0.04165774775728763,\n\
\ \"acc_norm\": 0.5103448275862069,\n \"acc_norm_stderr\": 0.04165774775728763\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.4126984126984127,\n \"acc_stderr\": 0.02535574126305527,\n \"\
acc_norm\": 0.4126984126984127,\n \"acc_norm_stderr\": 0.02535574126305527\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42857142857142855,\n\
\ \"acc_stderr\": 0.04426266681379909,\n \"acc_norm\": 0.42857142857142855,\n\
\ \"acc_norm_stderr\": 0.04426266681379909\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \
\ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7193548387096774,\n\
\ \"acc_stderr\": 0.0255606047210229,\n \"acc_norm\": 0.7193548387096774,\n\
\ \"acc_norm_stderr\": 0.0255606047210229\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.41379310344827586,\n \"acc_stderr\": 0.03465304488406795,\n\
\ \"acc_norm\": 0.41379310344827586,\n \"acc_norm_stderr\": 0.03465304488406795\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\"\
: 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7393939393939394,\n \"acc_stderr\": 0.034277431758165236,\n\
\ \"acc_norm\": 0.7393939393939394,\n \"acc_norm_stderr\": 0.034277431758165236\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.7272727272727273,\n \"acc_stderr\": 0.03173071239071724,\n \"\
acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.03173071239071724\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.8134715025906736,\n \"acc_stderr\": 0.02811209121011747,\n\
\ \"acc_norm\": 0.8134715025906736,\n \"acc_norm_stderr\": 0.02811209121011747\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.558974358974359,\n \"acc_stderr\": 0.02517404838400074,\n \
\ \"acc_norm\": 0.558974358974359,\n \"acc_norm_stderr\": 0.02517404838400074\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.3,\n \"acc_stderr\": 0.02794045713622841,\n \"acc_norm\"\
: 0.3,\n \"acc_norm_stderr\": 0.02794045713622841\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\"\
: {\n \"acc\": 0.5798319327731093,\n \"acc_stderr\": 0.03206183783236152,\n\
\ \"acc_norm\": 0.5798319327731093,\n \"acc_norm_stderr\": 0.03206183783236152\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"\
acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.7798165137614679,\n \"acc_stderr\": 0.017765978652327537,\n \"\
acc_norm\": 0.7798165137614679,\n \"acc_norm_stderr\": 0.017765978652327537\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.4351851851851852,\n \"acc_stderr\": 0.03381200005643525,\n \"\
acc_norm\": 0.4351851851851852,\n \"acc_norm_stderr\": 0.03381200005643525\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.803921568627451,\n \"acc_stderr\": 0.027865942286639325,\n \"\
acc_norm\": 0.803921568627451,\n \"acc_norm_stderr\": 0.027865942286639325\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.7932489451476793,\n \"acc_stderr\": 0.0263616516683891,\n \
\ \"acc_norm\": 0.7932489451476793,\n \"acc_norm_stderr\": 0.0263616516683891\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.672645739910314,\n\
\ \"acc_stderr\": 0.031493846709941306,\n \"acc_norm\": 0.672645739910314,\n\
\ \"acc_norm_stderr\": 0.031493846709941306\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.732824427480916,\n \"acc_stderr\": 0.03880848301082394,\n\
\ \"acc_norm\": 0.732824427480916,\n \"acc_norm_stderr\": 0.03880848301082394\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"\
acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n\
\ \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n\
\ \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.6748466257668712,\n \"acc_stderr\": 0.03680350371286461,\n\
\ \"acc_norm\": 0.6748466257668712,\n \"acc_norm_stderr\": 0.03680350371286461\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n\
\ \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.41964285714285715,\n\
\ \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.04058042015646034,\n\
\ \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.04058042015646034\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n\
\ \"acc_stderr\": 0.02280138253459754,\n \"acc_norm\": 0.8589743589743589,\n\
\ \"acc_norm_stderr\": 0.02280138253459754\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.64,\n \"acc_stderr\": 0.048241815132442176,\n \
\ \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.048241815132442176\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7854406130268199,\n\
\ \"acc_stderr\": 0.014680033956893346,\n \"acc_norm\": 0.7854406130268199,\n\
\ \"acc_norm_stderr\": 0.014680033956893346\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.6560693641618497,\n \"acc_stderr\": 0.025574123786546665,\n\
\ \"acc_norm\": 0.6560693641618497,\n \"acc_norm_stderr\": 0.025574123786546665\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3653631284916201,\n\
\ \"acc_stderr\": 0.016104833880142295,\n \"acc_norm\": 0.3653631284916201,\n\
\ \"acc_norm_stderr\": 0.016104833880142295\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.6405228758169934,\n \"acc_stderr\": 0.027475969910660952,\n\
\ \"acc_norm\": 0.6405228758169934,\n \"acc_norm_stderr\": 0.027475969910660952\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7009646302250804,\n\
\ \"acc_stderr\": 0.02600330111788514,\n \"acc_norm\": 0.7009646302250804,\n\
\ \"acc_norm_stderr\": 0.02600330111788514\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.6851851851851852,\n \"acc_stderr\": 0.025842248700902168,\n\
\ \"acc_norm\": 0.6851851851851852,\n \"acc_norm_stderr\": 0.025842248700902168\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.4326241134751773,\n \"acc_stderr\": 0.02955545423677885,\n \
\ \"acc_norm\": 0.4326241134751773,\n \"acc_norm_stderr\": 0.02955545423677885\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.43741851368970014,\n\
\ \"acc_stderr\": 0.012669813464935726,\n \"acc_norm\": 0.43741851368970014,\n\
\ \"acc_norm_stderr\": 0.012669813464935726\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.6176470588235294,\n \"acc_stderr\": 0.02952009569768776,\n\
\ \"acc_norm\": 0.6176470588235294,\n \"acc_norm_stderr\": 0.02952009569768776\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6143790849673203,\n \"acc_stderr\": 0.019691459052354025,\n \
\ \"acc_norm\": 0.6143790849673203,\n \"acc_norm_stderr\": 0.019691459052354025\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n\
\ \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n\
\ \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.673469387755102,\n \"acc_stderr\": 0.03002105623844031,\n\
\ \"acc_norm\": 0.673469387755102,\n \"acc_norm_stderr\": 0.03002105623844031\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7611940298507462,\n\
\ \"acc_stderr\": 0.030147775935409217,\n \"acc_norm\": 0.7611940298507462,\n\
\ \"acc_norm_stderr\": 0.030147775935409217\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.85,\n \"acc_stderr\": 0.03588702812826371,\n \
\ \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.03588702812826371\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4879518072289157,\n\
\ \"acc_stderr\": 0.03891364495835821,\n \"acc_norm\": 0.4879518072289157,\n\
\ \"acc_norm_stderr\": 0.03891364495835821\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8128654970760234,\n \"acc_stderr\": 0.029913127232368036,\n\
\ \"acc_norm\": 0.8128654970760234,\n \"acc_norm_stderr\": 0.029913127232368036\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.423500611995104,\n\
\ \"mc1_stderr\": 0.01729742144853473,\n \"mc2\": 0.5852690107055646,\n\
\ \"mc2_stderr\": 0.01561479793889522\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7719021310181531,\n \"acc_stderr\": 0.011793015817663597\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.49962092494313876,\n \
\ \"acc_stderr\": 0.013772480761626167\n }\n}\n```"
repo_url: https://huggingface.co/abhishekchohan/mistral-7B-forest-merge-v0.1
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|arc:challenge|25_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|gsm8k|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hellaswag|10_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-01-22T17-22-14.145358.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-management|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-virology|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|truthfulqa:mc|0_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-01-22T17-22-14.145358.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- '**/details_harness|winogrande|5_2024-01-22T17-22-14.145358.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-01-22T17-22-14.145358.parquet'
- config_name: results
data_files:
- split: 2024_01_22T17_22_14.145358
path:
- results_2024-01-22T17-22-14.145358.parquet
- split: latest
path:
- results_2024-01-22T17-22-14.145358.parquet
---
# Dataset Card for Evaluation run of abhishekchohan/mistral-7B-forest-merge-v0.1
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [abhishekchohan/mistral-7B-forest-merge-v0.1](https://huggingface.co/abhishekchohan/mistral-7B-forest-merge-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_abhishekchohan__mistral-7B-forest-merge-v0.1",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2024-01-22T17:22:14.145358](https://huggingface.co/datasets/open-llm-leaderboard/details_abhishekchohan__mistral-7B-forest-merge-v0.1/blob/main/results_2024-01-22T17-22-14.145358.json). (Note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each task's results in its own configuration and in the "latest" split for each eval.)
```python
{
"all": {
"acc": 0.6025264269069641,
"acc_stderr": 0.032921649449251675,
"acc_norm": 0.6050419928736916,
"acc_norm_stderr": 0.033582448395703776,
"mc1": 0.423500611995104,
"mc1_stderr": 0.01729742144853473,
"mc2": 0.5852690107055646,
"mc2_stderr": 0.01561479793889522
},
"harness|arc:challenge|25": {
"acc": 0.6040955631399317,
"acc_stderr": 0.014291228393536588,
"acc_norm": 0.6279863481228669,
"acc_norm_stderr": 0.014124597881844461
},
"harness|hellaswag|10": {
"acc": 0.6521609241187014,
"acc_stderr": 0.0047531124327286995,
"acc_norm": 0.8431587333200558,
"acc_norm_stderr": 0.0036290784658809796
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.5703703703703704,
"acc_stderr": 0.042763494943765995,
"acc_norm": 0.5703703703703704,
"acc_norm_stderr": 0.042763494943765995
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6513157894736842,
"acc_stderr": 0.0387813988879761,
"acc_norm": 0.6513157894736842,
"acc_norm_stderr": 0.0387813988879761
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.62,
"acc_stderr": 0.04878317312145632,
"acc_norm": 0.62,
"acc_norm_stderr": 0.04878317312145632
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6679245283018868,
"acc_stderr": 0.02898545565233439,
"acc_norm": 0.6679245283018868,
"acc_norm_stderr": 0.02898545565233439
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.6736111111111112,
"acc_stderr": 0.03921067198982266,
"acc_norm": 0.6736111111111112,
"acc_norm_stderr": 0.03921067198982266
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.41,
"acc_stderr": 0.04943110704237102,
"acc_norm": 0.41,
"acc_norm_stderr": 0.04943110704237102
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.47,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.47,
"acc_norm_stderr": 0.05016135580465919
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6069364161849711,
"acc_stderr": 0.0372424959581773,
"acc_norm": 0.6069364161849711,
"acc_norm_stderr": 0.0372424959581773
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.3431372549019608,
"acc_stderr": 0.04724007352383888,
"acc_norm": 0.3431372549019608,
"acc_norm_stderr": 0.04724007352383888
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.77,
"acc_stderr": 0.04229525846816507,
"acc_norm": 0.77,
"acc_norm_stderr": 0.04229525846816507
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5319148936170213,
"acc_stderr": 0.03261936918467382,
"acc_norm": 0.5319148936170213,
"acc_norm_stderr": 0.03261936918467382
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.38596491228070173,
"acc_stderr": 0.04579639422070434,
"acc_norm": 0.38596491228070173,
"acc_norm_stderr": 0.04579639422070434
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5103448275862069,
"acc_stderr": 0.04165774775728763,
"acc_norm": 0.5103448275862069,
"acc_norm_stderr": 0.04165774775728763
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.4126984126984127,
"acc_stderr": 0.02535574126305527,
"acc_norm": 0.4126984126984127,
"acc_norm_stderr": 0.02535574126305527
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.42857142857142855,
"acc_stderr": 0.04426266681379909,
"acc_norm": 0.42857142857142855,
"acc_norm_stderr": 0.04426266681379909
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7193548387096774,
"acc_stderr": 0.0255606047210229,
"acc_norm": 0.7193548387096774,
"acc_norm_stderr": 0.0255606047210229
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.41379310344827586,
"acc_stderr": 0.03465304488406795,
"acc_norm": 0.41379310344827586,
"acc_norm_stderr": 0.03465304488406795
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.68,
"acc_stderr": 0.04688261722621505,
"acc_norm": 0.68,
"acc_norm_stderr": 0.04688261722621505
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7393939393939394,
"acc_stderr": 0.034277431758165236,
"acc_norm": 0.7393939393939394,
"acc_norm_stderr": 0.034277431758165236
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7272727272727273,
"acc_stderr": 0.03173071239071724,
"acc_norm": 0.7272727272727273,
"acc_norm_stderr": 0.03173071239071724
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8134715025906736,
"acc_stderr": 0.02811209121011747,
"acc_norm": 0.8134715025906736,
"acc_norm_stderr": 0.02811209121011747
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.558974358974359,
"acc_stderr": 0.02517404838400074,
"acc_norm": 0.558974358974359,
"acc_norm_stderr": 0.02517404838400074
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.3,
"acc_stderr": 0.02794045713622841,
"acc_norm": 0.3,
"acc_norm_stderr": 0.02794045713622841
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.5798319327731093,
"acc_stderr": 0.03206183783236152,
"acc_norm": 0.5798319327731093,
"acc_norm_stderr": 0.03206183783236152
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.3576158940397351,
"acc_stderr": 0.03913453431177258,
"acc_norm": 0.3576158940397351,
"acc_norm_stderr": 0.03913453431177258
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.7798165137614679,
"acc_stderr": 0.017765978652327537,
"acc_norm": 0.7798165137614679,
"acc_norm_stderr": 0.017765978652327537
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.4351851851851852,
"acc_stderr": 0.03381200005643525,
"acc_norm": 0.4351851851851852,
"acc_norm_stderr": 0.03381200005643525
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.803921568627451,
"acc_stderr": 0.027865942286639325,
"acc_norm": 0.803921568627451,
"acc_norm_stderr": 0.027865942286639325
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7932489451476793,
"acc_stderr": 0.0263616516683891,
"acc_norm": 0.7932489451476793,
"acc_norm_stderr": 0.0263616516683891
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.672645739910314,
"acc_stderr": 0.031493846709941306,
"acc_norm": 0.672645739910314,
"acc_norm_stderr": 0.031493846709941306
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.732824427480916,
"acc_stderr": 0.03880848301082394,
"acc_norm": 0.732824427480916,
"acc_norm_stderr": 0.03880848301082394
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7851239669421488,
"acc_stderr": 0.037494924487096966,
"acc_norm": 0.7851239669421488,
"acc_norm_stderr": 0.037494924487096966
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8055555555555556,
"acc_stderr": 0.038260763248848646,
"acc_norm": 0.8055555555555556,
"acc_norm_stderr": 0.038260763248848646
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.6748466257668712,
"acc_stderr": 0.03680350371286461,
"acc_norm": 0.6748466257668712,
"acc_norm_stderr": 0.03680350371286461
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.41964285714285715,
"acc_stderr": 0.04684099321077106,
"acc_norm": 0.41964285714285715,
"acc_norm_stderr": 0.04684099321077106
},
"harness|hendrycksTest-management|5": {
"acc": 0.7864077669902912,
"acc_stderr": 0.04058042015646034,
"acc_norm": 0.7864077669902912,
"acc_norm_stderr": 0.04058042015646034
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8589743589743589,
"acc_stderr": 0.02280138253459754,
"acc_norm": 0.8589743589743589,
"acc_norm_stderr": 0.02280138253459754
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.64,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.64,
"acc_norm_stderr": 0.048241815132442176
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.7854406130268199,
"acc_stderr": 0.014680033956893346,
"acc_norm": 0.7854406130268199,
"acc_norm_stderr": 0.014680033956893346
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.6560693641618497,
"acc_stderr": 0.025574123786546665,
"acc_norm": 0.6560693641618497,
"acc_norm_stderr": 0.025574123786546665
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.3653631284916201,
"acc_stderr": 0.016104833880142295,
"acc_norm": 0.3653631284916201,
"acc_norm_stderr": 0.016104833880142295
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.6405228758169934,
"acc_stderr": 0.027475969910660952,
"acc_norm": 0.6405228758169934,
"acc_norm_stderr": 0.027475969910660952
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7009646302250804,
"acc_stderr": 0.02600330111788514,
"acc_norm": 0.7009646302250804,
"acc_norm_stderr": 0.02600330111788514
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.6851851851851852,
"acc_stderr": 0.025842248700902168,
"acc_norm": 0.6851851851851852,
"acc_norm_stderr": 0.025842248700902168
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4326241134751773,
"acc_stderr": 0.02955545423677885,
"acc_norm": 0.4326241134751773,
"acc_norm_stderr": 0.02955545423677885
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.43741851368970014,
"acc_stderr": 0.012669813464935726,
"acc_norm": 0.43741851368970014,
"acc_norm_stderr": 0.012669813464935726
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6176470588235294,
"acc_stderr": 0.02952009569768776,
"acc_norm": 0.6176470588235294,
"acc_norm_stderr": 0.02952009569768776
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6143790849673203,
"acc_stderr": 0.019691459052354025,
"acc_norm": 0.6143790849673203,
"acc_norm_stderr": 0.019691459052354025
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6545454545454545,
"acc_stderr": 0.04554619617541054,
"acc_norm": 0.6545454545454545,
"acc_norm_stderr": 0.04554619617541054
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.673469387755102,
"acc_stderr": 0.03002105623844031,
"acc_norm": 0.673469387755102,
"acc_norm_stderr": 0.03002105623844031
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.7611940298507462,
"acc_stderr": 0.030147775935409217,
"acc_norm": 0.7611940298507462,
"acc_norm_stderr": 0.030147775935409217
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.85,
"acc_stderr": 0.03588702812826371,
"acc_norm": 0.85,
"acc_norm_stderr": 0.03588702812826371
},
"harness|hendrycksTest-virology|5": {
"acc": 0.4879518072289157,
"acc_stderr": 0.03891364495835821,
"acc_norm": 0.4879518072289157,
"acc_norm_stderr": 0.03891364495835821
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8128654970760234,
"acc_stderr": 0.029913127232368036,
"acc_norm": 0.8128654970760234,
"acc_norm_stderr": 0.029913127232368036
},
"harness|truthfulqa:mc|0": {
"mc1": 0.423500611995104,
"mc1_stderr": 0.01729742144853473,
"mc2": 0.5852690107055646,
"mc2_stderr": 0.01561479793889522
},
"harness|winogrande|5": {
"acc": 0.7719021310181531,
"acc_stderr": 0.011793015817663597
},
"harness|gsm8k|5": {
"acc": 0.49962092494313876,
"acc_stderr": 0.013772480761626167
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
bastienp/visible-watermark-pita | ---
task_categories:
- object-detection
tags:
- watermark
- computer-vision
- object-detection
configs:
- config_name: default
data_files:
- split: train
path: "data/train.zip"
- split: test
path: "data/test.zip"
- split: val
path: "data/val.zip"
---
# Dataset Card for Dataset Name
<!-- Provide a quick summary of the dataset. -->
This dataset card aims to be a base template for new datasets. It has been generated using [this raw template](https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/templates/datasetcard_template.md?plain=1).
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Licence:
Annotations & Website
The annotations in this dataset along with this website belong to the COCO Consortium and are licensed under a Creative Commons Attribution 4.0 License.
Images
The COCO Consortium does not own the copyright of the images. Use of the images must abide by the Flickr Terms of Use. The users of the images accept full responsibility for the use of the dataset, including but not limited to the use of any copies of copyrighted images that they may create from the dataset.
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed]
|
Yehor/ukrainian-tts-kateryna | ---
license: cc-by-nc-4.0
task_categories:
- text-to-speech
language:
- uk
---
# 🇺🇦 Open Source Ukrainian Text-to-Speech dataset named Kateryna
Join Ukrainian community - https://t.me/speech_synthesis_uk
More details about this dataset - https://github.com/egorsmkv/ukrainian-tts-datasets/tree/main/kateryna
# Voice KATERYNA (female)
License (dual):
- For non-commerical applications: [CC-BY-NC](https://creativecommons.org/licenses/by-nc/2.0/)
- For commercial applications: contact the voice talent directly using https://t.me/shalenamotion
## Features
- Quality: high
- Duration: 2h40m
- Audio formats: OPUS
- Text format: JSONL (a `metadata.jsonl` file)
- Frequency: 48000 Hz
|
ISOBIM/GeometricCommand | ---
license: other
---
|
CyberHarem/shiranui_kantaicollection | ---
license: mit
task_categories:
- text-to-image
tags:
- art
- not-for-all-audiences
size_categories:
- n<1K
---
# Dataset of shiranui/不知火/不知火 (Kantai Collection)
This is the dataset of shiranui/不知火/不知火 (Kantai Collection), containing 500 images and their tags.
The core tags of this character are `pink_hair, short_hair, ponytail, blue_eyes, ribbon, neck_ribbon, red_ribbon, hair_ornament`, which are pruned in this dataset.
Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)).
## List of Packages
| Name | Images | Size | Download | Type | Description |
|:-----------------|---------:|:-----------|:---------------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------|
| raw | 500 | 429.53 MiB | [Download](https://huggingface.co/datasets/CyberHarem/shiranui_kantaicollection/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). |
| 800 | 500 | 298.50 MiB | [Download](https://huggingface.co/datasets/CyberHarem/shiranui_kantaicollection/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. |
| stage3-p480-800 | 1129 | 608.08 MiB | [Download](https://huggingface.co/datasets/CyberHarem/shiranui_kantaicollection/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
| 1200 | 500 | 398.06 MiB | [Download](https://huggingface.co/datasets/CyberHarem/shiranui_kantaicollection/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. |
| stage3-p480-1200 | 1129 | 768.35 MiB | [Download](https://huggingface.co/datasets/CyberHarem/shiranui_kantaicollection/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
### Load Raw Dataset with Waifuc
We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code
```python
import os
import zipfile
from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource
# download the raw archive (tagged images + meta info) from the Hugging Face Hub;
# the file is cached locally and its path is returned
zip_file = hf_hub_download(
    repo_id='CyberHarem/shiranui_kantaicollection',
    repo_type='dataset',
    filename='dataset-raw.zip',
)
# extract all files into your working directory (created if it does not exist)
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)
# load the dataset with waifuc; each item pairs an image with its metadata dict
source = LocalSource(dataset_dir)
for item in source:
    print(item.image, item.meta['filename'], item.meta['tags'])
```
## List of Clusters
List of tag clustering result, maybe some outfits can be mined here.
### Raw Text Version
| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags |
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| 0 | 14 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, school_uniform, short_sleeves, solo, vest, white_gloves, bike_shorts_under_skirt, looking_at_viewer, pleated_skirt, turret, cowboy_shot, white_shirt, machinery |
| 1 | 5 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, looking_at_viewer, pleated_skirt, school_uniform, short_sleeves, solo, vest, white_gloves, turret |
| 2 | 18 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, pleated_skirt, school_uniform, short_sleeves, shorts_under_skirt, solo, white_shirt, dress_shirt, grey_vest, white_gloves, bike_shorts, looking_at_viewer, black_shorts, simple_background, grey_skirt, short_ponytail, white_background, sitting, socks |
| 3 | 5 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, dress_shirt, grey_vest, pleated_skirt, school_uniform, short_sleeves, simple_background, solo, white_background, white_shirt, black_skirt, black_vest, cowboy_shot, looking_at_viewer, arms_behind_back, grey_skirt, hair_ribbon, sitting |
| 4 | 8 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1girl, black_vest, simple_background, solo, upper_body, white_background, white_shirt, dress_shirt, school_uniform, looking_at_viewer, short_ponytail, short_sleeves, grey_vest, portrait |
| 5 | 15 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | 1girl, school_uniform, short_sleeves, solo, upper_body, white_gloves, looking_at_viewer, white_shirt, grey_vest, dress_shirt, simple_background, white_background, blush |
| 6 | 8 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | 1boy, 1girl, hetero, solo_focus, white_gloves, bar_censor, gloved_handjob, school_uniform, blush, vest, shirt, cum, licking_penis, nipples, open_mouth, small_breasts, tongue |
| 7 | 6 | ![](samples/7/clu7-sample0.png) | ![](samples/7/clu7-sample1.png) | ![](samples/7/clu7-sample2.png) | ![](samples/7/clu7-sample3.png) | ![](samples/7/clu7-sample4.png) | 1boy, 1girl, bike_shorts, hetero, nipples, white_gloves, censored, sex, solo_focus, vaginal, cum_in_pussy, open_mouth, open_shirt, penis, small_breasts, spread_legs, torn_clothes, bed_sheet, blush, on_back, school_uniform |
| 8 | 25 | ![](samples/8/clu8-sample0.png) | ![](samples/8/clu8-sample1.png) | ![](samples/8/clu8-sample2.png) | ![](samples/8/clu8-sample3.png) | ![](samples/8/clu8-sample4.png) | 1girl, solo, grey_jacket, simple_background, hooded_jacket, looking_at_viewer, white_background, alternate_costume, backpack, long_sleeves, hood_down, black_pantyhose, bangs, blush, grey_shorts, hair_between_eyes, cowboy_shot, grey_hoodie |
### Table Version
| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | school_uniform | short_sleeves | solo | vest | white_gloves | bike_shorts_under_skirt | looking_at_viewer | pleated_skirt | turret | cowboy_shot | white_shirt | machinery | shorts_under_skirt | dress_shirt | grey_vest | bike_shorts | black_shorts | simple_background | grey_skirt | short_ponytail | white_background | sitting | socks | black_skirt | black_vest | arms_behind_back | hair_ribbon | upper_body | portrait | blush | 1boy | hetero | solo_focus | bar_censor | gloved_handjob | shirt | cum | licking_penis | nipples | open_mouth | small_breasts | tongue | censored | sex | vaginal | cum_in_pussy | open_shirt | penis | spread_legs | torn_clothes | bed_sheet | on_back | grey_jacket | hooded_jacket | alternate_costume | backpack | long_sleeves | hood_down | black_pantyhose | bangs | grey_shorts | hair_between_eyes | grey_hoodie |
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:-----------------|:----------------|:-------|:-------|:---------------|:--------------------------|:--------------------|:----------------|:---------|:--------------|:--------------|:------------|:---------------------|:--------------|:------------|:--------------|:---------------|:--------------------|:-------------|:-----------------|:-------------------|:----------|:--------|:--------------|:-------------|:-------------------|:--------------|:-------------|:-----------|:--------|:-------|:---------|:-------------|:-------------|:-----------------|:--------|:------|:----------------|:----------|:-------------|:----------------|:---------|:-----------|:------|:----------|:---------------|:-------------|:--------|:--------------|:---------------|:------------|:----------|:--------------|:----------------|:--------------------|:-----------|:---------------|:------------|:------------------|:--------|:--------------|:--------------------|:--------------|
| 0 | 14 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 1 | 5 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | X | X | X | | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 2 | 18 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | X | X | X | | X | | X | X | | | X | | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 3 | 5 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | X | X | X | | | | X | X | | X | X | | | X | X | | | X | X | | X | X | | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 4 | 8 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | X | X | X | | | | X | | | | X | | | X | X | | | X | | X | X | | | | X | | | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 5 | 15 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | X | X | X | X | | X | | X | | | | X | | | X | X | | | X | | | X | | | | | | | X | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 6 | 8 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | X | X | | | X | X | | | | | | | | | | | | | | | | | | | | | | | | | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | |
| 7 | 6 | ![](samples/7/clu7-sample0.png) | ![](samples/7/clu7-sample1.png) | ![](samples/7/clu7-sample2.png) | ![](samples/7/clu7-sample3.png) | ![](samples/7/clu7-sample4.png) | X | X | | | | X | | | | | | | | | | | X | | | | | | | | | | | | | | X | X | X | X | | | | | | X | X | X | | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | |
| 8 | 25 | ![](samples/8/clu8-sample0.png) | ![](samples/8/clu8-sample1.png) | ![](samples/8/clu8-sample2.png) | ![](samples/8/clu8-sample3.png) | ![](samples/8/clu8-sample4.png) | X | | | X | | | | X | | | X | | | | | | | | X | | | X | | | | | | | | | X | | | | | | | | | | | | | | | | | | | | | | | X | X | X | X | X | X | X | X | X | X | X |
|
Arham-Imran/Test | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype: image
splits:
- name: train
num_bytes: 217519583.0
num_examples: 94
download_size: 217260116
dataset_size: 217519583.0
---
# Dataset Card for "Test"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
metaeval/defeasible-nli | ---
license: apache-2.0
task_ids:
- natural-language-inference
task_categories:
- text-classification
language:
- en
---
https://github.com/rudinger/defeasible-nli
```
@inproceedings{rudinger2020thinking,
title={Thinking like a skeptic:
Defeasible inference in natural language},
author={Rudinger, Rachel and Shwartz, Vered and Hwang, Jena D and Bhagavatula, Chandra and Forbes, Maxwell and Le Bras, Ronan and Smith, Noah A and Choi, Yejin},
booktitle={Findings of the Association for Computational Linguistics: EMNLP 2020},
pages={4661--4675},
year={2020}
}
``` |
AMead10/lvl_5_vital_wikipedia_articles | ---
size_categories:
- 10K<n<100K
language:
- en
---
All [level 5 vital articles](https://en.wikipedia.org/wiki/Wikipedia:Vital_articles/Level/5) from Wikipedia.
Dataset made from the [20240320](https://huggingface.co/datasets/AMead10/wikipedia_20240320_en) wikipedia dump |
CVasNLPExperiments/Hatefulmemes_test_google_flan_t5_xl_mode_C_A_T_OCR_rices_ns_1000 | ---
dataset_info:
features:
- name: id
dtype: int64
- name: prompt
sequence: string
- name: true_label
dtype: string
- name: prediction
dtype: string
splits:
- name: fewshot_0
num_bytes: 1175673
num_examples: 1000
download_size: 205591
dataset_size: 1175673
---
# Dataset Card for "Hatefulmemes_test_google_flan_t5_xl_mode_C_A_T_OCR_rices_ns_1000"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
one-sec-cv12/chunk_92 | ---
dataset_info:
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
splits:
- name: train
num_bytes: 23811451776.0
num_examples: 247912
download_size: 22034658039
dataset_size: 23811451776.0
---
# Dataset Card for "chunk_92"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
vitaliy-sharandin/climate-krakow-temp-monthly | ---
dataset_info:
features:
  - name: Absolute maximum temperature [°C]
    dtype: float64
  - name: Absolute minimum temperature [°C]
    dtype: float64
  - name: Average monthly temperature [°C]
dtype: float64
- name: dt
dtype: timestamp[ns]
splits:
- name: train
num_bytes: 27904
num_examples: 872
download_size: 17326
dataset_size: 27904
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "climate-krakow-temp-monthly"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
grasool/data-to16Hz | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
dataset_info:
features:
- name: input_features
sequence:
sequence: float32
- name: labels
sequence: int64
splits:
- name: train
num_bytes: 10844658264
num_examples: 11291
- name: test
num_bytes: 2710421968
num_examples: 2822
download_size: 1783591438
dataset_size: 13555080232
---
# Dataset Card for "data-to16Hz"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
westbrook/Speech_separation | ---
license: apache-2.0
---
|
BangumiBase/mahoushoujoprettysammy | ---
license: mit
tags:
- art
size_categories:
- 1K<n<10K
---
# Bangumi Image Base of Mahou Shoujo Pretty Sammy
This is the image base of bangumi Mahou Shoujo Pretty Sammy, we detected 40 characters, 2878 images in total. The full dataset is [here](all.zip).
**Please note that these image bases are not guaranteed to be 100% clean; they may actually contain noise.** If you intend to manually train models using this dataset, we recommend performing necessary preprocessing on the downloaded dataset to eliminate potential noisy samples (approximately 1% probability).
Here is the characters' preview:
| # | Images | Download | Preview 1 | Preview 2 | Preview 3 | Preview 4 | Preview 5 | Preview 6 | Preview 7 | Preview 8 |
|:------|---------:|:---------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|
| 0 | 1023 | [Download](0/dataset.zip) | ![preview 1](0/preview_1.png) | ![preview 2](0/preview_2.png) | ![preview 3](0/preview_3.png) | ![preview 4](0/preview_4.png) | ![preview 5](0/preview_5.png) | ![preview 6](0/preview_6.png) | ![preview 7](0/preview_7.png) | ![preview 8](0/preview_8.png) |
| 1 | 72 | [Download](1/dataset.zip) | ![preview 1](1/preview_1.png) | ![preview 2](1/preview_2.png) | ![preview 3](1/preview_3.png) | ![preview 4](1/preview_4.png) | ![preview 5](1/preview_5.png) | ![preview 6](1/preview_6.png) | ![preview 7](1/preview_7.png) | ![preview 8](1/preview_8.png) |
| 2 | 17 | [Download](2/dataset.zip) | ![preview 1](2/preview_1.png) | ![preview 2](2/preview_2.png) | ![preview 3](2/preview_3.png) | ![preview 4](2/preview_4.png) | ![preview 5](2/preview_5.png) | ![preview 6](2/preview_6.png) | ![preview 7](2/preview_7.png) | ![preview 8](2/preview_8.png) |
| 3 | 19 | [Download](3/dataset.zip) | ![preview 1](3/preview_1.png) | ![preview 2](3/preview_2.png) | ![preview 3](3/preview_3.png) | ![preview 4](3/preview_4.png) | ![preview 5](3/preview_5.png) | ![preview 6](3/preview_6.png) | ![preview 7](3/preview_7.png) | ![preview 8](3/preview_8.png) |
| 4 | 15 | [Download](4/dataset.zip) | ![preview 1](4/preview_1.png) | ![preview 2](4/preview_2.png) | ![preview 3](4/preview_3.png) | ![preview 4](4/preview_4.png) | ![preview 5](4/preview_5.png) | ![preview 6](4/preview_6.png) | ![preview 7](4/preview_7.png) | ![preview 8](4/preview_8.png) |
| 5 | 56 | [Download](5/dataset.zip) | ![preview 1](5/preview_1.png) | ![preview 2](5/preview_2.png) | ![preview 3](5/preview_3.png) | ![preview 4](5/preview_4.png) | ![preview 5](5/preview_5.png) | ![preview 6](5/preview_6.png) | ![preview 7](5/preview_7.png) | ![preview 8](5/preview_8.png) |
| 6 | 18 | [Download](6/dataset.zip) | ![preview 1](6/preview_1.png) | ![preview 2](6/preview_2.png) | ![preview 3](6/preview_3.png) | ![preview 4](6/preview_4.png) | ![preview 5](6/preview_5.png) | ![preview 6](6/preview_6.png) | ![preview 7](6/preview_7.png) | ![preview 8](6/preview_8.png) |
| 7 | 58 | [Download](7/dataset.zip) | ![preview 1](7/preview_1.png) | ![preview 2](7/preview_2.png) | ![preview 3](7/preview_3.png) | ![preview 4](7/preview_4.png) | ![preview 5](7/preview_5.png) | ![preview 6](7/preview_6.png) | ![preview 7](7/preview_7.png) | ![preview 8](7/preview_8.png) |
| 8 | 168 | [Download](8/dataset.zip) | ![preview 1](8/preview_1.png) | ![preview 2](8/preview_2.png) | ![preview 3](8/preview_3.png) | ![preview 4](8/preview_4.png) | ![preview 5](8/preview_5.png) | ![preview 6](8/preview_6.png) | ![preview 7](8/preview_7.png) | ![preview 8](8/preview_8.png) |
| 9 | 39 | [Download](9/dataset.zip) | ![preview 1](9/preview_1.png) | ![preview 2](9/preview_2.png) | ![preview 3](9/preview_3.png) | ![preview 4](9/preview_4.png) | ![preview 5](9/preview_5.png) | ![preview 6](9/preview_6.png) | ![preview 7](9/preview_7.png) | ![preview 8](9/preview_8.png) |
| 10 | 107 | [Download](10/dataset.zip) | ![preview 1](10/preview_1.png) | ![preview 2](10/preview_2.png) | ![preview 3](10/preview_3.png) | ![preview 4](10/preview_4.png) | ![preview 5](10/preview_5.png) | ![preview 6](10/preview_6.png) | ![preview 7](10/preview_7.png) | ![preview 8](10/preview_8.png) |
| 11 | 22 | [Download](11/dataset.zip) | ![preview 1](11/preview_1.png) | ![preview 2](11/preview_2.png) | ![preview 3](11/preview_3.png) | ![preview 4](11/preview_4.png) | ![preview 5](11/preview_5.png) | ![preview 6](11/preview_6.png) | ![preview 7](11/preview_7.png) | ![preview 8](11/preview_8.png) |
| 12 | 20 | [Download](12/dataset.zip) | ![preview 1](12/preview_1.png) | ![preview 2](12/preview_2.png) | ![preview 3](12/preview_3.png) | ![preview 4](12/preview_4.png) | ![preview 5](12/preview_5.png) | ![preview 6](12/preview_6.png) | ![preview 7](12/preview_7.png) | ![preview 8](12/preview_8.png) |
| 13 | 12 | [Download](13/dataset.zip) | ![preview 1](13/preview_1.png) | ![preview 2](13/preview_2.png) | ![preview 3](13/preview_3.png) | ![preview 4](13/preview_4.png) | ![preview 5](13/preview_5.png) | ![preview 6](13/preview_6.png) | ![preview 7](13/preview_7.png) | ![preview 8](13/preview_8.png) |
| 14 | 22 | [Download](14/dataset.zip) | ![preview 1](14/preview_1.png) | ![preview 2](14/preview_2.png) | ![preview 3](14/preview_3.png) | ![preview 4](14/preview_4.png) | ![preview 5](14/preview_5.png) | ![preview 6](14/preview_6.png) | ![preview 7](14/preview_7.png) | ![preview 8](14/preview_8.png) |
| 15 | 12 | [Download](15/dataset.zip) | ![preview 1](15/preview_1.png) | ![preview 2](15/preview_2.png) | ![preview 3](15/preview_3.png) | ![preview 4](15/preview_4.png) | ![preview 5](15/preview_5.png) | ![preview 6](15/preview_6.png) | ![preview 7](15/preview_7.png) | ![preview 8](15/preview_8.png) |
| 16 | 46 | [Download](16/dataset.zip) | ![preview 1](16/preview_1.png) | ![preview 2](16/preview_2.png) | ![preview 3](16/preview_3.png) | ![preview 4](16/preview_4.png) | ![preview 5](16/preview_5.png) | ![preview 6](16/preview_6.png) | ![preview 7](16/preview_7.png) | ![preview 8](16/preview_8.png) |
| 17 | 60 | [Download](17/dataset.zip) | ![preview 1](17/preview_1.png) | ![preview 2](17/preview_2.png) | ![preview 3](17/preview_3.png) | ![preview 4](17/preview_4.png) | ![preview 5](17/preview_5.png) | ![preview 6](17/preview_6.png) | ![preview 7](17/preview_7.png) | ![preview 8](17/preview_8.png) |
| 18 | 22 | [Download](18/dataset.zip) | ![preview 1](18/preview_1.png) | ![preview 2](18/preview_2.png) | ![preview 3](18/preview_3.png) | ![preview 4](18/preview_4.png) | ![preview 5](18/preview_5.png) | ![preview 6](18/preview_6.png) | ![preview 7](18/preview_7.png) | ![preview 8](18/preview_8.png) |
| 19 | 19 | [Download](19/dataset.zip) | ![preview 1](19/preview_1.png) | ![preview 2](19/preview_2.png) | ![preview 3](19/preview_3.png) | ![preview 4](19/preview_4.png) | ![preview 5](19/preview_5.png) | ![preview 6](19/preview_6.png) | ![preview 7](19/preview_7.png) | ![preview 8](19/preview_8.png) |
| 20 | 7 | [Download](20/dataset.zip) | ![preview 1](20/preview_1.png) | ![preview 2](20/preview_2.png) | ![preview 3](20/preview_3.png) | ![preview 4](20/preview_4.png) | ![preview 5](20/preview_5.png) | ![preview 6](20/preview_6.png) | ![preview 7](20/preview_7.png) | N/A |
| 21 | 15 | [Download](21/dataset.zip) | ![preview 1](21/preview_1.png) | ![preview 2](21/preview_2.png) | ![preview 3](21/preview_3.png) | ![preview 4](21/preview_4.png) | ![preview 5](21/preview_5.png) | ![preview 6](21/preview_6.png) | ![preview 7](21/preview_7.png) | ![preview 8](21/preview_8.png) |
| 22 | 34 | [Download](22/dataset.zip) | ![preview 1](22/preview_1.png) | ![preview 2](22/preview_2.png) | ![preview 3](22/preview_3.png) | ![preview 4](22/preview_4.png) | ![preview 5](22/preview_5.png) | ![preview 6](22/preview_6.png) | ![preview 7](22/preview_7.png) | ![preview 8](22/preview_8.png) |
| 23 | 5 | [Download](23/dataset.zip) | ![preview 1](23/preview_1.png) | ![preview 2](23/preview_2.png) | ![preview 3](23/preview_3.png) | ![preview 4](23/preview_4.png) | ![preview 5](23/preview_5.png) | N/A | N/A | N/A |
| 24 | 35 | [Download](24/dataset.zip) | ![preview 1](24/preview_1.png) | ![preview 2](24/preview_2.png) | ![preview 3](24/preview_3.png) | ![preview 4](24/preview_4.png) | ![preview 5](24/preview_5.png) | ![preview 6](24/preview_6.png) | ![preview 7](24/preview_7.png) | ![preview 8](24/preview_8.png) |
| 25 | 12 | [Download](25/dataset.zip) | ![preview 1](25/preview_1.png) | ![preview 2](25/preview_2.png) | ![preview 3](25/preview_3.png) | ![preview 4](25/preview_4.png) | ![preview 5](25/preview_5.png) | ![preview 6](25/preview_6.png) | ![preview 7](25/preview_7.png) | ![preview 8](25/preview_8.png) |
| 26 | 98 | [Download](26/dataset.zip) | ![preview 1](26/preview_1.png) | ![preview 2](26/preview_2.png) | ![preview 3](26/preview_3.png) | ![preview 4](26/preview_4.png) | ![preview 5](26/preview_5.png) | ![preview 6](26/preview_6.png) | ![preview 7](26/preview_7.png) | ![preview 8](26/preview_8.png) |
| 27 | 169 | [Download](27/dataset.zip) | ![preview 1](27/preview_1.png) | ![preview 2](27/preview_2.png) | ![preview 3](27/preview_3.png) | ![preview 4](27/preview_4.png) | ![preview 5](27/preview_5.png) | ![preview 6](27/preview_6.png) | ![preview 7](27/preview_7.png) | ![preview 8](27/preview_8.png) |
| 28 | 34 | [Download](28/dataset.zip) | ![preview 1](28/preview_1.png) | ![preview 2](28/preview_2.png) | ![preview 3](28/preview_3.png) | ![preview 4](28/preview_4.png) | ![preview 5](28/preview_5.png) | ![preview 6](28/preview_6.png) | ![preview 7](28/preview_7.png) | ![preview 8](28/preview_8.png) |
| 29 | 18 | [Download](29/dataset.zip) | ![preview 1](29/preview_1.png) | ![preview 2](29/preview_2.png) | ![preview 3](29/preview_3.png) | ![preview 4](29/preview_4.png) | ![preview 5](29/preview_5.png) | ![preview 6](29/preview_6.png) | ![preview 7](29/preview_7.png) | ![preview 8](29/preview_8.png) |
| 30 | 60 | [Download](30/dataset.zip) | ![preview 1](30/preview_1.png) | ![preview 2](30/preview_2.png) | ![preview 3](30/preview_3.png) | ![preview 4](30/preview_4.png) | ![preview 5](30/preview_5.png) | ![preview 6](30/preview_6.png) | ![preview 7](30/preview_7.png) | ![preview 8](30/preview_8.png) |
| 31 | 27 | [Download](31/dataset.zip) | ![preview 1](31/preview_1.png) | ![preview 2](31/preview_2.png) | ![preview 3](31/preview_3.png) | ![preview 4](31/preview_4.png) | ![preview 5](31/preview_5.png) | ![preview 6](31/preview_6.png) | ![preview 7](31/preview_7.png) | ![preview 8](31/preview_8.png) |
| 32 | 10 | [Download](32/dataset.zip) | ![preview 1](32/preview_1.png) | ![preview 2](32/preview_2.png) | ![preview 3](32/preview_3.png) | ![preview 4](32/preview_4.png) | ![preview 5](32/preview_5.png) | ![preview 6](32/preview_6.png) | ![preview 7](32/preview_7.png) | ![preview 8](32/preview_8.png) |
| 33 | 8 | [Download](33/dataset.zip) | ![preview 1](33/preview_1.png) | ![preview 2](33/preview_2.png) | ![preview 3](33/preview_3.png) | ![preview 4](33/preview_4.png) | ![preview 5](33/preview_5.png) | ![preview 6](33/preview_6.png) | ![preview 7](33/preview_7.png) | ![preview 8](33/preview_8.png) |
| 34 | 17 | [Download](34/dataset.zip) | ![preview 1](34/preview_1.png) | ![preview 2](34/preview_2.png) | ![preview 3](34/preview_3.png) | ![preview 4](34/preview_4.png) | ![preview 5](34/preview_5.png) | ![preview 6](34/preview_6.png) | ![preview 7](34/preview_7.png) | ![preview 8](34/preview_8.png) |
| 35 | 76 | [Download](35/dataset.zip) | ![preview 1](35/preview_1.png) | ![preview 2](35/preview_2.png) | ![preview 3](35/preview_3.png) | ![preview 4](35/preview_4.png) | ![preview 5](35/preview_5.png) | ![preview 6](35/preview_6.png) | ![preview 7](35/preview_7.png) | ![preview 8](35/preview_8.png) |
| 36 | 271 | [Download](36/dataset.zip) | ![preview 1](36/preview_1.png) | ![preview 2](36/preview_2.png) | ![preview 3](36/preview_3.png) | ![preview 4](36/preview_4.png) | ![preview 5](36/preview_5.png) | ![preview 6](36/preview_6.png) | ![preview 7](36/preview_7.png) | ![preview 8](36/preview_8.png) |
| 37 | 26 | [Download](37/dataset.zip) | ![preview 1](37/preview_1.png) | ![preview 2](37/preview_2.png) | ![preview 3](37/preview_3.png) | ![preview 4](37/preview_4.png) | ![preview 5](37/preview_5.png) | ![preview 6](37/preview_6.png) | ![preview 7](37/preview_7.png) | ![preview 8](37/preview_8.png) |
| 38 | 5 | [Download](38/dataset.zip) | ![preview 1](38/preview_1.png) | ![preview 2](38/preview_2.png) | ![preview 3](38/preview_3.png) | ![preview 4](38/preview_4.png) | ![preview 5](38/preview_5.png) | N/A | N/A | N/A |
| noise | 124 | [Download](-1/dataset.zip) | ![preview 1](-1/preview_1.png) | ![preview 2](-1/preview_2.png) | ![preview 3](-1/preview_3.png) | ![preview 4](-1/preview_4.png) | ![preview 5](-1/preview_5.png) | ![preview 6](-1/preview_6.png) | ![preview 7](-1/preview_7.png) | ![preview 8](-1/preview_8.png) |
|
range3/wikipedia-ja-20230101 | ---
license:
- cc-by-sa-3.0
- gfdl
task_categories:
- text-generation
- fill-mask
language:
- ja
---
# range3/wikipedia-ja-20230101
This dataset consists of a parquet file from the wikipedia dataset with only Japanese data extracted. It is generated by the following python code.
このデータセットは、wikipediaデータセットから日本語データのみを抽出したparquetファイルで構成されます。以下のpythonコードによって生成しています。
```py
import datasets
# Load the Japanese-language subset of the Wikipedia dump dated 2023-01-01.
# NOTE(review): this builder runs an Apache Beam pipeline; "DirectRunner"
# executes it locally on this machine rather than on a Beam cluster.
dss = datasets.load_dataset(
    "wikipedia",
    language="ja",
    date="20230101",
    beam_runner="DirectRunner",
)
# Write each split (e.g. "train") to its own parquet file.
for split,ds in dss.items():
    ds.to_parquet(f"wikipedia-ja-20230101/{split}.parquet")
```
|
open-llm-leaderboard/details_chargoddard__MelangeB-70b | ---
pretty_name: Evaluation run of chargoddard/MelangeB-70b
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [chargoddard/MelangeB-70b](https://huggingface.co/chargoddard/MelangeB-70b) on\
\ the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 64 configurations, each one corresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_chargoddard__MelangeB-70b\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-10-17T13:18:04.928943](https://huggingface.co/datasets/open-llm-leaderboard/details_chargoddard__MelangeB-70b/blob/main/results_2023-10-17T13-18-04.928943.json)(note\
\ that there might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.49958053691275167,\n\
\ \"em_stderr\": 0.005120466189311586,\n \"f1\": 0.5792397231543648,\n\
\ \"f1_stderr\": 0.004704767839498484,\n \"acc\": 0.570668027786471,\n\
\ \"acc_stderr\": 0.01156392378740017\n },\n \"harness|drop|3\": {\n\
\ \"em\": 0.49958053691275167,\n \"em_stderr\": 0.005120466189311586,\n\
\ \"f1\": 0.5792397231543648,\n \"f1_stderr\": 0.004704767839498484\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.3062926459438969,\n \
\ \"acc_stderr\": 0.0126969301065629\n },\n \"harness|winogrande|5\": {\n\
\ \"acc\": 0.835043409629045,\n \"acc_stderr\": 0.010430917468237438\n\
\ }\n}\n```"
repo_url: https://huggingface.co/chargoddard/MelangeB-70b
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|arc:challenge|25_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_drop_3
data_files:
- split: 2023_10_17T13_18_04.928943
path:
- '**/details_harness|drop|3_2023-10-17T13-18-04.928943.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-10-17T13-18-04.928943.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_10_17T13_18_04.928943
path:
- '**/details_harness|gsm8k|5_2023-10-17T13-18-04.928943.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-10-17T13-18-04.928943.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hellaswag|10_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-08-23T14:27:52.893839.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-management|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-virology|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_08_23T14_27_52.893839
path:
- '**/details_harness|truthfulqa:mc|0_2023-08-23T14:27:52.893839.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-08-23T14:27:52.893839.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_10_17T13_18_04.928943
path:
- '**/details_harness|winogrande|5_2023-10-17T13-18-04.928943.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-10-17T13-18-04.928943.parquet'
- config_name: results
data_files:
- split: 2023_10_17T13_18_04.928943
path:
- results_2023-10-17T13-18-04.928943.parquet
- split: latest
path:
- results_2023-10-17T13-18-04.928943.parquet
---
# Dataset Card for Evaluation run of chargoddard/MelangeB-70b
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/chargoddard/MelangeB-70b
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [chargoddard/MelangeB-70b](https://huggingface.co/chargoddard/MelangeB-70b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split is always pointing to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_chargoddard__MelangeB-70b",
"harness_winogrande_5",
split="latest")
```
## Latest results
These are the [latest results from run 2023-10-17T13:18:04.928943](https://huggingface.co/datasets/open-llm-leaderboard/details_chargoddard__MelangeB-70b/blob/main/results_2023-10-17T13-18-04.928943.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"em": 0.49958053691275167,
"em_stderr": 0.005120466189311586,
"f1": 0.5792397231543648,
"f1_stderr": 0.004704767839498484,
"acc": 0.570668027786471,
"acc_stderr": 0.01156392378740017
},
"harness|drop|3": {
"em": 0.49958053691275167,
"em_stderr": 0.005120466189311586,
"f1": 0.5792397231543648,
"f1_stderr": 0.004704767839498484
},
"harness|gsm8k|5": {
"acc": 0.3062926459438969,
"acc_stderr": 0.0126969301065629
},
"harness|winogrande|5": {
"acc": 0.835043409629045,
"acc_stderr": 0.010430917468237438
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] |
open-llm-leaderboard/details_Gille__StrangeMerges_47-7B-dare_ties | ---
pretty_name: Evaluation run of Gille/StrangeMerges_47-7B-dare_ties
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [Gille/StrangeMerges_47-7B-dare_ties](https://huggingface.co/Gille/StrangeMerges_47-7B-dare_ties)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configurations, each one corresponding to one of the\
\ evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Gille__StrangeMerges_47-7B-dare_ties\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2024-04-02T20:30:41.647453](https://huggingface.co/datasets/open-llm-leaderboard/details_Gille__StrangeMerges_47-7B-dare_ties/blob/main/results_2024-04-02T20-30-41.647453.json)(note\
\ that there might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6367859829101828,\n\
\ \"acc_stderr\": 0.032488134334004146,\n \"acc_norm\": 0.6377335973539087,\n\
\ \"acc_norm_stderr\": 0.03315290807892043,\n \"mc1\": 0.5128518971848225,\n\
\ \"mc1_stderr\": 0.01749771794429982,\n \"mc2\": 0.6785725906165029,\n\
\ \"mc2_stderr\": 0.014784490269410245\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.6629692832764505,\n \"acc_stderr\": 0.013813476652902272,\n\
\ \"acc_norm\": 0.6945392491467577,\n \"acc_norm_stderr\": 0.013460080478002508\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6742680740888269,\n\
\ \"acc_stderr\": 0.0046768988619789115,\n \"acc_norm\": 0.8668591913961362,\n\
\ \"acc_norm_stderr\": 0.0033903254580202576\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411021,\n \
\ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411021\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6444444444444445,\n\
\ \"acc_stderr\": 0.04135176749720385,\n \"acc_norm\": 0.6444444444444445,\n\
\ \"acc_norm_stderr\": 0.04135176749720385\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.03715062154998904,\n\
\ \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.03715062154998904\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.58,\n\
\ \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \
\ \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.690566037735849,\n \"acc_stderr\": 0.028450154794118637,\n\
\ \"acc_norm\": 0.690566037735849,\n \"acc_norm_stderr\": 0.028450154794118637\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6805555555555556,\n\
\ \"acc_stderr\": 0.038990736873573344,\n \"acc_norm\": 0.6805555555555556,\n\
\ \"acc_norm_stderr\": 0.038990736873573344\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.44,\n \"acc_stderr\": 0.049888765156985884,\n \
\ \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.049888765156985884\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\
acc\": 0.57,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\"\
: 0.57,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768079,\n \
\ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768079\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6069364161849711,\n\
\ \"acc_stderr\": 0.037242495958177295,\n \"acc_norm\": 0.6069364161849711,\n\
\ \"acc_norm_stderr\": 0.037242495958177295\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.048971049527263666,\n\
\ \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.048971049527263666\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.76,\n \"acc_stderr\": 0.04292346959909284,\n \"acc_norm\": 0.76,\n\
\ \"acc_norm_stderr\": 0.04292346959909284\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.5702127659574469,\n \"acc_stderr\": 0.03236214467715564,\n\
\ \"acc_norm\": 0.5702127659574469,\n \"acc_norm_stderr\": 0.03236214467715564\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.43859649122807015,\n\
\ \"acc_stderr\": 0.04668000738510455,\n \"acc_norm\": 0.43859649122807015,\n\
\ \"acc_norm_stderr\": 0.04668000738510455\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.04144311810878152,\n\
\ \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.04144311810878152\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.42592592592592593,\n \"acc_stderr\": 0.02546714904546955,\n \"\
acc_norm\": 0.42592592592592593,\n \"acc_norm_stderr\": 0.02546714904546955\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4365079365079365,\n\
\ \"acc_stderr\": 0.04435932892851466,\n \"acc_norm\": 0.4365079365079365,\n\
\ \"acc_norm_stderr\": 0.04435932892851466\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \
\ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\
: 0.7645161290322581,\n \"acc_stderr\": 0.02413763242933771,\n \"\
acc_norm\": 0.7645161290322581,\n \"acc_norm_stderr\": 0.02413763242933771\n\
\ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\
: 0.5369458128078818,\n \"acc_stderr\": 0.035083705204426656,\n \"\
acc_norm\": 0.5369458128078818,\n \"acc_norm_stderr\": 0.035083705204426656\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\"\
: 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.0328766675860349,\n\
\ \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.0328766675860349\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.7929292929292929,\n \"acc_stderr\": 0.02886977846026705,\n \"\
acc_norm\": 0.7929292929292929,\n \"acc_norm_stderr\": 0.02886977846026705\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n\
\ \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.02199531196364424\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.6538461538461539,\n \"acc_stderr\": 0.02412112541694119,\n \
\ \"acc_norm\": 0.6538461538461539,\n \"acc_norm_stderr\": 0.02412112541694119\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.3814814814814815,\n \"acc_stderr\": 0.029616718927497586,\n \
\ \"acc_norm\": 0.3814814814814815,\n \"acc_norm_stderr\": 0.029616718927497586\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.6008403361344538,\n \"acc_stderr\": 0.03181110032413925,\n \
\ \"acc_norm\": 0.6008403361344538,\n \"acc_norm_stderr\": 0.03181110032413925\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"\
acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.8366972477064221,\n \"acc_stderr\": 0.01584825580650155,\n \"\
acc_norm\": 0.8366972477064221,\n \"acc_norm_stderr\": 0.01584825580650155\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.5648148148148148,\n \"acc_stderr\": 0.03381200005643526,\n \"\
acc_norm\": 0.5648148148148148,\n \"acc_norm_stderr\": 0.03381200005643526\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.7843137254901961,\n \"acc_stderr\": 0.028867431449849316,\n \"\
acc_norm\": 0.7843137254901961,\n \"acc_norm_stderr\": 0.028867431449849316\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.7974683544303798,\n \"acc_stderr\": 0.026160568246601443,\n \
\ \"acc_norm\": 0.7974683544303798,\n \"acc_norm_stderr\": 0.026160568246601443\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6816143497757847,\n\
\ \"acc_stderr\": 0.03126580522513713,\n \"acc_norm\": 0.6816143497757847,\n\
\ \"acc_norm_stderr\": 0.03126580522513713\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.7557251908396947,\n \"acc_stderr\": 0.03768335959728742,\n\
\ \"acc_norm\": 0.7557251908396947,\n \"acc_norm_stderr\": 0.03768335959728742\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.768595041322314,\n \"acc_stderr\": 0.03849856098794087,\n \"acc_norm\"\
: 0.768595041322314,\n \"acc_norm_stderr\": 0.03849856098794087\n },\n\
\ \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n\
\ \"acc_stderr\": 0.04077494709252627,\n \"acc_norm\": 0.7685185185185185,\n\
\ \"acc_norm_stderr\": 0.04077494709252627\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.7361963190184049,\n \"acc_stderr\": 0.034624199316156234,\n\
\ \"acc_norm\": 0.7361963190184049,\n \"acc_norm_stderr\": 0.034624199316156234\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.45535714285714285,\n\
\ \"acc_stderr\": 0.04726835553719099,\n \"acc_norm\": 0.45535714285714285,\n\
\ \"acc_norm_stderr\": 0.04726835553719099\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n\
\ \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8760683760683761,\n\
\ \"acc_stderr\": 0.02158649400128136,\n \"acc_norm\": 0.8760683760683761,\n\
\ \"acc_norm_stderr\": 0.02158649400128136\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \
\ \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.80970625798212,\n\
\ \"acc_stderr\": 0.014036945850381387,\n \"acc_norm\": 0.80970625798212,\n\
\ \"acc_norm_stderr\": 0.014036945850381387\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.708092485549133,\n \"acc_stderr\": 0.024476994076247326,\n\
\ \"acc_norm\": 0.708092485549133,\n \"acc_norm_stderr\": 0.024476994076247326\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3486033519553073,\n\
\ \"acc_stderr\": 0.01593748465668703,\n \"acc_norm\": 0.3486033519553073,\n\
\ \"acc_norm_stderr\": 0.01593748465668703\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.6797385620915033,\n \"acc_stderr\": 0.026716118380156847,\n\
\ \"acc_norm\": 0.6797385620915033,\n \"acc_norm_stderr\": 0.026716118380156847\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7041800643086816,\n\
\ \"acc_stderr\": 0.025922371788818756,\n \"acc_norm\": 0.7041800643086816,\n\
\ \"acc_norm_stderr\": 0.025922371788818756\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.7037037037037037,\n \"acc_stderr\": 0.02540719779889016,\n\
\ \"acc_norm\": 0.7037037037037037,\n \"acc_norm_stderr\": 0.02540719779889016\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.5,\n \"acc_stderr\": 0.029827499313594685,\n \"acc_norm\"\
: 0.5,\n \"acc_norm_stderr\": 0.029827499313594685\n },\n \"harness|hendrycksTest-professional_law|5\"\
: {\n \"acc\": 0.4556714471968709,\n \"acc_stderr\": 0.012719949543032205,\n\
\ \"acc_norm\": 0.4556714471968709,\n \"acc_norm_stderr\": 0.012719949543032205\n\
\ },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\"\
: 0.6360294117647058,\n \"acc_stderr\": 0.02922719246003203,\n \"\
acc_norm\": 0.6360294117647058,\n \"acc_norm_stderr\": 0.02922719246003203\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6519607843137255,\n \"acc_stderr\": 0.019270998708223977,\n \
\ \"acc_norm\": 0.6519607843137255,\n \"acc_norm_stderr\": 0.019270998708223977\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n\
\ \"acc_stderr\": 0.044612721759105085,\n \"acc_norm\": 0.6818181818181818,\n\
\ \"acc_norm_stderr\": 0.044612721759105085\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.7061224489795919,\n \"acc_stderr\": 0.02916273841024977,\n\
\ \"acc_norm\": 0.7061224489795919,\n \"acc_norm_stderr\": 0.02916273841024977\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n\
\ \"acc_stderr\": 0.02619392354445412,\n \"acc_norm\": 0.835820895522388,\n\
\ \"acc_norm_stderr\": 0.02619392354445412\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774709,\n \
\ \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774709\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5421686746987951,\n\
\ \"acc_stderr\": 0.0387862677100236,\n \"acc_norm\": 0.5421686746987951,\n\
\ \"acc_norm_stderr\": 0.0387862677100236\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8070175438596491,\n \"acc_stderr\": 0.030267457554898458,\n\
\ \"acc_norm\": 0.8070175438596491,\n \"acc_norm_stderr\": 0.030267457554898458\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5128518971848225,\n\
\ \"mc1_stderr\": 0.01749771794429982,\n \"mc2\": 0.6785725906165029,\n\
\ \"mc2_stderr\": 0.014784490269410245\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.8224151539068666,\n \"acc_stderr\": 0.010740676861359238\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6194086429112965,\n \
\ \"acc_stderr\": 0.01337397127772981\n }\n}\n```"
repo_url: https://huggingface.co/Gille/StrangeMerges_47-7B-dare_ties
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|arc:challenge|25_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|gsm8k|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hellaswag|10_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-04-02T20-30-41.647453.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-management|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-virology|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|truthfulqa:mc|0_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-04-02T20-30-41.647453.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- '**/details_harness|winogrande|5_2024-04-02T20-30-41.647453.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-04-02T20-30-41.647453.parquet'
- config_name: results
data_files:
- split: 2024_04_02T20_30_41.647453
path:
- results_2024-04-02T20-30-41.647453.parquet
- split: latest
path:
- results_2024-04-02T20-30-41.647453.parquet
---
# Dataset Card for Evaluation run of Gille/StrangeMerges_47-7B-dare_ties
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [Gille/StrangeMerges_47-7B-dare_ties](https://huggingface.co/Gille/StrangeMerges_47-7B-dare_ties) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Gille__StrangeMerges_47-7B-dare_ties",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2024-04-02T20:30:41.647453](https://huggingface.co/datasets/open-llm-leaderboard/details_Gille__StrangeMerges_47-7B-dare_ties/blob/main/results_2024-04-02T20-30-41.647453.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6367859829101828,
"acc_stderr": 0.032488134334004146,
"acc_norm": 0.6377335973539087,
"acc_norm_stderr": 0.03315290807892043,
"mc1": 0.5128518971848225,
"mc1_stderr": 0.01749771794429982,
"mc2": 0.6785725906165029,
"mc2_stderr": 0.014784490269410245
},
"harness|arc:challenge|25": {
"acc": 0.6629692832764505,
"acc_stderr": 0.013813476652902272,
"acc_norm": 0.6945392491467577,
"acc_norm_stderr": 0.013460080478002508
},
"harness|hellaswag|10": {
"acc": 0.6742680740888269,
"acc_stderr": 0.0046768988619789115,
"acc_norm": 0.8668591913961362,
"acc_norm_stderr": 0.0033903254580202576
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.35,
"acc_stderr": 0.04793724854411021,
"acc_norm": 0.35,
"acc_norm_stderr": 0.04793724854411021
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6444444444444445,
"acc_stderr": 0.04135176749720385,
"acc_norm": 0.6444444444444445,
"acc_norm_stderr": 0.04135176749720385
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.7039473684210527,
"acc_stderr": 0.03715062154998904,
"acc_norm": 0.7039473684210527,
"acc_norm_stderr": 0.03715062154998904
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.58,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.58,
"acc_norm_stderr": 0.049604496374885836
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.690566037735849,
"acc_stderr": 0.028450154794118637,
"acc_norm": 0.690566037735849,
"acc_norm_stderr": 0.028450154794118637
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.6805555555555556,
"acc_stderr": 0.038990736873573344,
"acc_norm": 0.6805555555555556,
"acc_norm_stderr": 0.038990736873573344
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.44,
"acc_stderr": 0.049888765156985884,
"acc_norm": 0.44,
"acc_norm_stderr": 0.049888765156985884
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.57,
"acc_stderr": 0.04975698519562428,
"acc_norm": 0.57,
"acc_norm_stderr": 0.04975698519562428
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768079,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768079
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6069364161849711,
"acc_stderr": 0.037242495958177295,
"acc_norm": 0.6069364161849711,
"acc_norm_stderr": 0.037242495958177295
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.4117647058823529,
"acc_stderr": 0.048971049527263666,
"acc_norm": 0.4117647058823529,
"acc_norm_stderr": 0.048971049527263666
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.76,
"acc_stderr": 0.04292346959909284,
"acc_norm": 0.76,
"acc_norm_stderr": 0.04292346959909284
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5702127659574469,
"acc_stderr": 0.03236214467715564,
"acc_norm": 0.5702127659574469,
"acc_norm_stderr": 0.03236214467715564
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.43859649122807015,
"acc_stderr": 0.04668000738510455,
"acc_norm": 0.43859649122807015,
"acc_norm_stderr": 0.04668000738510455
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5517241379310345,
"acc_stderr": 0.04144311810878152,
"acc_norm": 0.5517241379310345,
"acc_norm_stderr": 0.04144311810878152
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.42592592592592593,
"acc_stderr": 0.02546714904546955,
"acc_norm": 0.42592592592592593,
"acc_norm_stderr": 0.02546714904546955
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.4365079365079365,
"acc_stderr": 0.04435932892851466,
"acc_norm": 0.4365079365079365,
"acc_norm_stderr": 0.04435932892851466
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7645161290322581,
"acc_stderr": 0.02413763242933771,
"acc_norm": 0.7645161290322581,
"acc_norm_stderr": 0.02413763242933771
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5369458128078818,
"acc_stderr": 0.035083705204426656,
"acc_norm": 0.5369458128078818,
"acc_norm_stderr": 0.035083705204426656
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.73,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.73,
"acc_norm_stderr": 0.044619604333847394
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7696969696969697,
"acc_stderr": 0.0328766675860349,
"acc_norm": 0.7696969696969697,
"acc_norm_stderr": 0.0328766675860349
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7929292929292929,
"acc_stderr": 0.02886977846026705,
"acc_norm": 0.7929292929292929,
"acc_norm_stderr": 0.02886977846026705
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8963730569948186,
"acc_stderr": 0.02199531196364424,
"acc_norm": 0.8963730569948186,
"acc_norm_stderr": 0.02199531196364424
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6538461538461539,
"acc_stderr": 0.02412112541694119,
"acc_norm": 0.6538461538461539,
"acc_norm_stderr": 0.02412112541694119
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.3814814814814815,
"acc_stderr": 0.029616718927497586,
"acc_norm": 0.3814814814814815,
"acc_norm_stderr": 0.029616718927497586
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6008403361344538,
"acc_stderr": 0.03181110032413925,
"acc_norm": 0.6008403361344538,
"acc_norm_stderr": 0.03181110032413925
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.3443708609271523,
"acc_stderr": 0.038796870240733264,
"acc_norm": 0.3443708609271523,
"acc_norm_stderr": 0.038796870240733264
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8366972477064221,
"acc_stderr": 0.01584825580650155,
"acc_norm": 0.8366972477064221,
"acc_norm_stderr": 0.01584825580650155
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5648148148148148,
"acc_stderr": 0.03381200005643526,
"acc_norm": 0.5648148148148148,
"acc_norm_stderr": 0.03381200005643526
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.7843137254901961,
"acc_stderr": 0.028867431449849316,
"acc_norm": 0.7843137254901961,
"acc_norm_stderr": 0.028867431449849316
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7974683544303798,
"acc_stderr": 0.026160568246601443,
"acc_norm": 0.7974683544303798,
"acc_norm_stderr": 0.026160568246601443
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6816143497757847,
"acc_stderr": 0.03126580522513713,
"acc_norm": 0.6816143497757847,
"acc_norm_stderr": 0.03126580522513713
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7557251908396947,
"acc_stderr": 0.03768335959728742,
"acc_norm": 0.7557251908396947,
"acc_norm_stderr": 0.03768335959728742
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.768595041322314,
"acc_stderr": 0.03849856098794087,
"acc_norm": 0.768595041322314,
"acc_norm_stderr": 0.03849856098794087
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7685185185185185,
"acc_stderr": 0.04077494709252627,
"acc_norm": 0.7685185185185185,
"acc_norm_stderr": 0.04077494709252627
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7361963190184049,
"acc_stderr": 0.034624199316156234,
"acc_norm": 0.7361963190184049,
"acc_norm_stderr": 0.034624199316156234
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.45535714285714285,
"acc_stderr": 0.04726835553719099,
"acc_norm": 0.45535714285714285,
"acc_norm_stderr": 0.04726835553719099
},
"harness|hendrycksTest-management|5": {
"acc": 0.7766990291262136,
"acc_stderr": 0.04123553189891431,
"acc_norm": 0.7766990291262136,
"acc_norm_stderr": 0.04123553189891431
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8760683760683761,
"acc_stderr": 0.02158649400128136,
"acc_norm": 0.8760683760683761,
"acc_norm_stderr": 0.02158649400128136
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.71,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.71,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.80970625798212,
"acc_stderr": 0.014036945850381387,
"acc_norm": 0.80970625798212,
"acc_norm_stderr": 0.014036945850381387
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.708092485549133,
"acc_stderr": 0.024476994076247326,
"acc_norm": 0.708092485549133,
"acc_norm_stderr": 0.024476994076247326
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.3486033519553073,
"acc_stderr": 0.01593748465668703,
"acc_norm": 0.3486033519553073,
"acc_norm_stderr": 0.01593748465668703
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.6797385620915033,
"acc_stderr": 0.026716118380156847,
"acc_norm": 0.6797385620915033,
"acc_norm_stderr": 0.026716118380156847
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7041800643086816,
"acc_stderr": 0.025922371788818756,
"acc_norm": 0.7041800643086816,
"acc_norm_stderr": 0.025922371788818756
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7037037037037037,
"acc_stderr": 0.02540719779889016,
"acc_norm": 0.7037037037037037,
"acc_norm_stderr": 0.02540719779889016
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.5,
"acc_stderr": 0.029827499313594685,
"acc_norm": 0.5,
"acc_norm_stderr": 0.029827499313594685
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4556714471968709,
"acc_stderr": 0.012719949543032205,
"acc_norm": 0.4556714471968709,
"acc_norm_stderr": 0.012719949543032205
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6360294117647058,
"acc_stderr": 0.02922719246003203,
"acc_norm": 0.6360294117647058,
"acc_norm_stderr": 0.02922719246003203
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6519607843137255,
"acc_stderr": 0.019270998708223977,
"acc_norm": 0.6519607843137255,
"acc_norm_stderr": 0.019270998708223977
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6818181818181818,
"acc_stderr": 0.044612721759105085,
"acc_norm": 0.6818181818181818,
"acc_norm_stderr": 0.044612721759105085
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7061224489795919,
"acc_stderr": 0.02916273841024977,
"acc_norm": 0.7061224489795919,
"acc_norm_stderr": 0.02916273841024977
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.835820895522388,
"acc_stderr": 0.02619392354445412,
"acc_norm": 0.835820895522388,
"acc_norm_stderr": 0.02619392354445412
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.84,
"acc_stderr": 0.03684529491774709,
"acc_norm": 0.84,
"acc_norm_stderr": 0.03684529491774709
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5421686746987951,
"acc_stderr": 0.0387862677100236,
"acc_norm": 0.5421686746987951,
"acc_norm_stderr": 0.0387862677100236
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8070175438596491,
"acc_stderr": 0.030267457554898458,
"acc_norm": 0.8070175438596491,
"acc_norm_stderr": 0.030267457554898458
},
"harness|truthfulqa:mc|0": {
"mc1": 0.5128518971848225,
"mc1_stderr": 0.01749771794429982,
"mc2": 0.6785725906165029,
"mc2_stderr": 0.014784490269410245
},
"harness|winogrande|5": {
"acc": 0.8224151539068666,
"acc_stderr": 0.010740676861359238
},
"harness|gsm8k|5": {
"acc": 0.6194086429112965,
"acc_stderr": 0.01337397127772981
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
WeijianQi/lama_trex | ---
dataset_info:
features:
- name: statement
dtype: string
- name: label
dtype: int64
splits:
- name: train
num_bytes: 1830465
num_examples: 34017
download_size: 715119
dataset_size: 1830465
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
neyruto10/modelonatan22 | ---
license: apache-2.0
---
|
kinit-tomassako/ver_claimdetection_demo | ---
configs:
- config_name: default
data_files:
- split: train
path: data.csv
---
# Dataset Card for Dataset Name
<!-- Provide a quick summary of the dataset. -->
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
CyberHarem/kazuno_sarah_lovelivesunshine | ---
license: mit
task_categories:
- text-to-image
tags:
- art
- not-for-all-audiences
size_categories:
- n<1K
---
# Dataset of kazuno_sarah/้นฟ่ง่่ฏ (Love Live! Sunshine!!)
This is the dataset of kazuno_sarah/้นฟ่ง่่ฏ (Love Live! Sunshine!!), containing 341 images and their tags.
The core tags of this character are `bangs, purple_hair, breasts, purple_eyes, sidelocks, side_ponytail, long_hair, large_breasts`, which are pruned in this dataset.
Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)).
## List of Packages
| Name | Images | Size | Download | Type | Description |
|:-----------------|---------:|:-----------|:-------------------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------|
| raw | 341 | 451.45 MiB | [Download](https://huggingface.co/datasets/CyberHarem/kazuno_sarah_lovelivesunshine/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). |
| 800 | 341 | 248.36 MiB | [Download](https://huggingface.co/datasets/CyberHarem/kazuno_sarah_lovelivesunshine/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. |
| stage3-p480-800 | 824 | 543.03 MiB | [Download](https://huggingface.co/datasets/CyberHarem/kazuno_sarah_lovelivesunshine/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
| 1200 | 341 | 395.70 MiB | [Download](https://huggingface.co/datasets/CyberHarem/kazuno_sarah_lovelivesunshine/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. |
| stage3-p480-1200 | 824 | 805.48 MiB | [Download](https://huggingface.co/datasets/CyberHarem/kazuno_sarah_lovelivesunshine/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
### Load Raw Dataset with Waifuc
We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code
```python
import os
import zipfile
from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource
# download raw archive file
zip_file = hf_hub_download(
repo_id='CyberHarem/kazuno_sarah_lovelivesunshine',
repo_type='dataset',
filename='dataset-raw.zip',
)
# extract files to your directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
zf.extractall(dataset_dir)
# load the dataset with waifuc
source = LocalSource(dataset_dir)
for item in source:
print(item.image, item.meta['filename'], item.meta['tags'])
```
## List of Clusters
List of tag clustering results; maybe some outfits can be mined here.
### Raw Text Version
| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags |
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| 0 | 7 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, fingerless_gloves, hat, looking_at_viewer, red_gloves, smile, solo, white_jacket, belt, black_shorts, short_shorts, white_headwear, open_mouth, short_sleeves, blush, collared_shirt, dated, garter_straps, striped_necktie, black_shirt, choker, english_text, grey_thighhighs, hand_on_hip, happy_birthday, medium_breasts, pink_eyes, torn_thighhighs |
| 1 | 5 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, fingerless_gloves, hat, looking_at_viewer, smile, solo, short_sleeves, upper_body, black_gloves, hair_down, english_text, happy_birthday, pink_eyes |
| 2 | 14 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | looking_at_viewer, smile, 1girl, solo, black_gloves, fingerless_gloves, hair_down, black_headwear, black_shorts, blush, short_shorts, short_sleeves, choker, fishnet_pantyhose, collarbone, headset, pink_eyes, peaked_cap |
| 3 | 9 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, hair_down, looking_at_viewer, solo, black_gloves, choker, fingerless_gloves, hat, smile, upper_body, collarbone, jacket, black_headwear, medium_breasts, blush, cleavage, short_sleeves, white_background |
| 4 | 11 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1girl, english_text, looking_at_viewer, solo, character_name, happy_birthday, smile, dated, hair_ribbon, school_uniform, shiny_hair, upper_body, blush, pink_eyes, long_sleeves, medium_breasts, skirt |
| 5 | 5 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | 1girl, hair_ribbon, simple_background, smile, solo, white_background, blush, long_sleeves, looking_at_viewer, dated, pink_eyes, pleated_skirt, serafuku |
| 6 | 11 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | 1girl, looking_at_viewer, solo, blush, collarbone, navel, cleavage, smile, white_background, cowboy_shot, simple_background, hair_ribbon, jewelry, medium_breasts, white_ribbon, hair_flower, purple_bikini |
| 7 | 10 | ![](samples/7/clu7-sample0.png) | ![](samples/7/clu7-sample1.png) | ![](samples/7/clu7-sample2.png) | ![](samples/7/clu7-sample3.png) | ![](samples/7/clu7-sample4.png) | 1girl, blush, looking_at_viewer, maid_headdress, solo, wa_maid, yellow_kimono, maid_apron, white_apron, frilled_apron, pink_eyes, short_sleeves, :d, holding_tray, open_mouth, upper_body, indoors |
| 8 | 10 | ![](samples/8/clu8-sample0.png) | ![](samples/8/clu8-sample1.png) | ![](samples/8/clu8-sample2.png) | ![](samples/8/clu8-sample3.png) | ![](samples/8/clu8-sample4.png) | 1girl, collarbone, looking_at_viewer, bare_shoulders, blush, solo, cleavage, bare_arms, underwear_only, sitting, hair_ribbon, medium_breasts, smile, bed_sheet, black_bra, black_panties, on_bed, thighs |
### Table Version
| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | fingerless_gloves | hat | looking_at_viewer | red_gloves | smile | solo | white_jacket | belt | black_shorts | short_shorts | white_headwear | open_mouth | short_sleeves | blush | collared_shirt | dated | garter_straps | striped_necktie | black_shirt | choker | english_text | grey_thighhighs | hand_on_hip | happy_birthday | medium_breasts | pink_eyes | torn_thighhighs | upper_body | black_gloves | hair_down | black_headwear | fishnet_pantyhose | collarbone | headset | peaked_cap | jacket | cleavage | white_background | character_name | hair_ribbon | school_uniform | shiny_hair | long_sleeves | skirt | simple_background | pleated_skirt | serafuku | navel | cowboy_shot | jewelry | white_ribbon | hair_flower | purple_bikini | maid_headdress | wa_maid | yellow_kimono | maid_apron | white_apron | frilled_apron | :d | holding_tray | indoors | bare_shoulders | bare_arms | underwear_only | sitting | bed_sheet | black_bra | black_panties | on_bed | thighs |
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:--------------------|:------|:--------------------|:-------------|:--------|:-------|:---------------|:-------|:---------------|:---------------|:-----------------|:-------------|:----------------|:--------|:-----------------|:--------|:----------------|:------------------|:--------------|:---------|:---------------|:------------------|:--------------|:-----------------|:-----------------|:------------|:------------------|:-------------|:---------------|:------------|:-----------------|:--------------------|:-------------|:----------|:-------------|:---------|:-----------|:-------------------|:-----------------|:--------------|:-----------------|:-------------|:---------------|:--------|:--------------------|:----------------|:-----------|:--------|:--------------|:----------|:---------------|:--------------|:----------------|:-----------------|:----------|:----------------|:-------------|:--------------|:----------------|:-----|:---------------|:----------|:-----------------|:------------|:-----------------|:----------|:------------|:------------|:----------------|:---------|:---------|
| 0 | 7 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 1 | 5 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | X | | X | X | | | | | | | X | | | | | | | | X | | | X | | X | | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 2 | 14 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | X | | X | | X | X | | | X | X | | | X | X | | | | | | X | | | | | | X | | | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 3 | 9 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | X | X | X | | X | X | | | | | | | X | X | | | | | | X | | | | | X | | | X | X | X | X | | X | | | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 4 | 11 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | | | X | | X | X | | | | | | | | X | | X | | | | | X | | | X | X | X | | X | | | | | | | | | | | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 5 | 5 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | X | | | X | | X | X | | | | | | | | X | | X | | | | | | | | | | X | | | | | | | | | | | | X | | X | | | X | | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | |
| 6 | 11 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | X | | | X | | X | X | | | | | | | | X | | | | | | | | | | | X | | | | | | | | X | | | | X | X | | X | | | | | X | | | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | |
| 7 | 10 | ![](samples/7/clu7-sample0.png) | ![](samples/7/clu7-sample1.png) | ![](samples/7/clu7-sample2.png) | ![](samples/7/clu7-sample3.png) | ![](samples/7/clu7-sample4.png) | X | | | X | | | X | | | | | | X | X | X | | | | | | | | | | | | X | | X | | | | | | | | | | | | | | | | | | | | | | | | | | X | X | X | X | X | X | X | X | X | | | | | | | | | |
| 8 | 10 | ![](samples/8/clu8-sample0.png) | ![](samples/8/clu8-sample1.png) | ![](samples/8/clu8-sample2.png) | ![](samples/8/clu8-sample3.png) | ![](samples/8/clu8-sample4.png) | X | | | X | | X | X | | | | | | | | X | | | | | | | | | | | X | | | | | | | | X | | | | X | | | X | | | | | | | | | | | | | | | | | | | | | | | X | X | X | X | X | X | X | X | X |
|
zliu333/truck_at_port4 | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 54523532.0
num_examples: 37
download_size: 54514526
dataset_size: 54523532.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
AdapterOcean/augmentatio-standardized | ---
license: apache-2.0
---
|
aimankem32/data | ---
license: openrail
---
|
pszemraj/simplepile-lite | ---
license: apache-2.0
size_categories:
- 100K<n<1M
source_datasets:
- pszemraj/simple_wikipedia_LM
- JeanKaddour/minipile
task_categories:
- fill-mask
- text-generation
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 1552622685
num_examples: 452432
- name: validation
num_bytes: 3202346
num_examples: 1000
- name: test
num_bytes: 41145686
num_examples: 11908
download_size: 867798625
dataset_size: 1596970717
language:
- en
---
# Dataset Card for "simplepile-lite"
Interleaved dataset using 'first exhausted' strategy. Counts:
```python
DatasetDict({
train: Dataset({
features: ['text'],
num_rows: 452432
})
validation: Dataset({
features: ['text'],
num_rows: 1000
})
test: Dataset({
features: ['text'],
num_rows: 11908
})
})
```
## token counts - train
using GPTNeoX Tokenizer:
| | token_count |
|:------|-----------------:|
| count | 452432 |
| mean | 868.642 |
| std | 4791.71 |
| min | 3 |
| 25% | 88 |
| 50% | 232 |
| 75% | 590 |
| max | 1.39747e+06 |
--- |
gryffindor-ISWS/1500_dbp_abs_withIMG | ---
license: gpl-3.0
---
|
yuval6967/OIG-small-chip2_deduplicated | ---
dataset_info:
features:
- name: user
dtype: string
- name: chip2
dtype: string
splits:
- name: train
num_bytes: 73795170.04573706
num_examples: 188892
download_size: 47456241
dataset_size: 73795170.04573706
---
# Dataset Card for "OIG-small-chip2_deduplicated"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
jilp00/youtoks-curious-amalgam | ---
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 10554174
num_examples: 12590
download_size: 4006050
dataset_size: 10554174
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
gagan3012/arabic-training-embeddings-final | ---
dataset_info:
features:
- name: query
dtype: string
- name: positive
sequence: string
- name: negative
sequence: string
splits:
- name: train
num_bytes: 17650951101.740124
num_examples: 12950552
download_size: 8519784056
dataset_size: 17650951101.740124
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
joey234/mmlu-security_studies-rule-neg | ---
dataset_info:
features:
- name: choices
sequence: string
- name: answer
dtype:
class_label:
names:
'0': A
'1': B
'2': C
'3': D
- name: question
dtype: string
splits:
- name: test
num_bytes: 205412
num_examples: 245
download_size: 113725
dataset_size: 205412
---
# Dataset Card for "mmlu-security_studies-rule-neg"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
mwong/climatetext-climate_evidence-claim-related-evaluation | ---
annotations_creators:
- crowdsourced
language_creators:
- crowdsourced
language:
- en
license:
- cc-by-sa-3.0
- gpl-3.0
multilinguality:
- monolingual
size_categories:
- 100K<n<1M
source_datasets:
- extended|climate_text
task_categories:
- text-classification
task_ids:
- fact-checking
---
### Dataset Summary
This dataset is extracted from the Climate Text dataset (https://www.sustainablefinance.uzh.ch/en/research/climate-fever/climatext.html), pre-processed, and ready to evaluate.
The evaluation objective is a text classification task - given a claim and climate-related evidence, predict whether the claim is related to the evidence. |
open-llm-leaderboard/details_allknowingroger__FrankenLong-15B-passthrough | ---
pretty_name: Evaluation run of allknowingroger/FrankenLong-15B-passthrough
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [allknowingroger/FrankenLong-15B-passthrough](https://huggingface.co/allknowingroger/FrankenLong-15B-passthrough)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configurations, each one corresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_allknowingroger__FrankenLong-15B-passthrough\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2024-04-11T08:23:35.335626](https://huggingface.co/datasets/open-llm-leaderboard/details_allknowingroger__FrankenLong-15B-passthrough/blob/main/results_2024-04-11T08-23-35.335626.json)(note\
\ that there might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.24540239366183794,\n\
\ \"acc_stderr\": 0.030571557769658596,\n \"acc_norm\": 0.24658088053054075,\n\
\ \"acc_norm_stderr\": 0.03138624578542628,\n \"mc1\": 0.23623011015911874,\n\
\ \"mc1_stderr\": 0.014869755015871086,\n \"mc2\": 0.4895265981417741,\n\
\ \"mc2_stderr\": 0.016899411744816118\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.23293515358361774,\n \"acc_stderr\": 0.012352507042617386,\n\
\ \"acc_norm\": 0.2909556313993174,\n \"acc_norm_stderr\": 0.01327307786590758\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.25124477195777734,\n\
\ \"acc_stderr\": 0.00432842570099869,\n \"acc_norm\": 0.2605058753236407,\n\
\ \"acc_norm_stderr\": 0.004380136468543945\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \
\ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n\
\ \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.3111111111111111,\n\
\ \"acc_stderr\": 0.039992628766177214,\n \"acc_norm\": 0.3111111111111111,\n\
\ \"acc_norm_stderr\": 0.039992628766177214\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.19736842105263158,\n \"acc_stderr\": 0.03238981601699397,\n\
\ \"acc_norm\": 0.19736842105263158,\n \"acc_norm_stderr\": 0.03238981601699397\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.25,\n\
\ \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \
\ \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.22641509433962265,\n \"acc_stderr\": 0.025757559893106737,\n\
\ \"acc_norm\": 0.22641509433962265,\n \"acc_norm_stderr\": 0.025757559893106737\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.19444444444444445,\n\
\ \"acc_stderr\": 0.03309615177059004,\n \"acc_norm\": 0.19444444444444445,\n\
\ \"acc_norm_stderr\": 0.03309615177059004\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909283,\n \
\ \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909283\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.23,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.23,\n\
\ \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720683,\n \
\ \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720683\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.23121387283236994,\n\
\ \"acc_stderr\": 0.0321473730202947,\n \"acc_norm\": 0.23121387283236994,\n\
\ \"acc_norm_stderr\": 0.0321473730202947\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.041583075330832865,\n\
\ \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.041583075330832865\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n\
\ \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.28085106382978725,\n \"acc_stderr\": 0.02937917046412481,\n\
\ \"acc_norm\": 0.28085106382978725,\n \"acc_norm_stderr\": 0.02937917046412481\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.21052631578947367,\n\
\ \"acc_stderr\": 0.0383515395439942,\n \"acc_norm\": 0.21052631578947367,\n\
\ \"acc_norm_stderr\": 0.0383515395439942\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.2689655172413793,\n \"acc_stderr\": 0.036951833116502325,\n\
\ \"acc_norm\": 0.2689655172413793,\n \"acc_norm_stderr\": 0.036951833116502325\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.2222222222222222,\n \"acc_stderr\": 0.021411684393694196,\n \"\
acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.021411684393694196\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3412698412698413,\n\
\ \"acc_stderr\": 0.04240799327574924,\n \"acc_norm\": 0.3412698412698413,\n\
\ \"acc_norm_stderr\": 0.04240799327574924\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \
\ \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.2161290322580645,\n\
\ \"acc_stderr\": 0.02341529343356853,\n \"acc_norm\": 0.2161290322580645,\n\
\ \"acc_norm_stderr\": 0.02341529343356853\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.18226600985221675,\n \"acc_stderr\": 0.02716334085964515,\n\
\ \"acc_norm\": 0.18226600985221675,\n \"acc_norm_stderr\": 0.02716334085964515\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\"\
: 0.23,\n \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n\
\ \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.2676767676767677,\n \"acc_stderr\": 0.03154449888270286,\n \"\
acc_norm\": 0.2676767676767677,\n \"acc_norm_stderr\": 0.03154449888270286\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.18652849740932642,\n \"acc_stderr\": 0.028112091210117457,\n\
\ \"acc_norm\": 0.18652849740932642,\n \"acc_norm_stderr\": 0.028112091210117457\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.2128205128205128,\n \"acc_stderr\": 0.020752423722128023,\n\
\ \"acc_norm\": 0.2128205128205128,\n \"acc_norm_stderr\": 0.020752423722128023\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.2851851851851852,\n \"acc_stderr\": 0.027528599210340492,\n \
\ \"acc_norm\": 0.2851851851851852,\n \"acc_norm_stderr\": 0.027528599210340492\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.20168067226890757,\n \"acc_stderr\": 0.026064313406304527,\n\
\ \"acc_norm\": 0.20168067226890757,\n \"acc_norm_stderr\": 0.026064313406304527\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.23178807947019867,\n \"acc_stderr\": 0.03445406271987053,\n \"\
acc_norm\": 0.23178807947019867,\n \"acc_norm_stderr\": 0.03445406271987053\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.20917431192660552,\n \"acc_stderr\": 0.017437937173343222,\n \"\
acc_norm\": 0.20917431192660552,\n \"acc_norm_stderr\": 0.017437937173343222\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.24074074074074073,\n \"acc_stderr\": 0.029157522184605617,\n \"\
acc_norm\": 0.24074074074074073,\n \"acc_norm_stderr\": 0.029157522184605617\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.2549019607843137,\n \"acc_stderr\": 0.030587591351604243,\n \"\
acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.030587591351604243\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.25316455696202533,\n \"acc_stderr\": 0.02830465794303531,\n \
\ \"acc_norm\": 0.25316455696202533,\n \"acc_norm_stderr\": 0.02830465794303531\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.27802690582959644,\n\
\ \"acc_stderr\": 0.030069584874494033,\n \"acc_norm\": 0.27802690582959644,\n\
\ \"acc_norm_stderr\": 0.030069584874494033\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.21374045801526717,\n \"acc_stderr\": 0.035954616117746904,\n\
\ \"acc_norm\": 0.21374045801526717,\n \"acc_norm_stderr\": 0.035954616117746904\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.2644628099173554,\n \"acc_stderr\": 0.04026187527591205,\n \"\
acc_norm\": 0.2644628099173554,\n \"acc_norm_stderr\": 0.04026187527591205\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.3055555555555556,\n\
\ \"acc_stderr\": 0.04453197507374984,\n \"acc_norm\": 0.3055555555555556,\n\
\ \"acc_norm_stderr\": 0.04453197507374984\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.24539877300613497,\n \"acc_stderr\": 0.03380939813943354,\n\
\ \"acc_norm\": 0.24539877300613497,\n \"acc_norm_stderr\": 0.03380939813943354\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2767857142857143,\n\
\ \"acc_stderr\": 0.04246624336697624,\n \"acc_norm\": 0.2767857142857143,\n\
\ \"acc_norm_stderr\": 0.04246624336697624\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.22330097087378642,\n \"acc_stderr\": 0.04123553189891431,\n\
\ \"acc_norm\": 0.22330097087378642,\n \"acc_norm_stderr\": 0.04123553189891431\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.25213675213675213,\n\
\ \"acc_stderr\": 0.02844796547623102,\n \"acc_norm\": 0.25213675213675213,\n\
\ \"acc_norm_stderr\": 0.02844796547623102\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816506,\n \
\ \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816506\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.28991060025542786,\n\
\ \"acc_stderr\": 0.01622501794477096,\n \"acc_norm\": 0.28991060025542786,\n\
\ \"acc_norm_stderr\": 0.01622501794477096\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.20520231213872833,\n \"acc_stderr\": 0.021742519835276287,\n\
\ \"acc_norm\": 0.20520231213872833,\n \"acc_norm_stderr\": 0.021742519835276287\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24581005586592178,\n\
\ \"acc_stderr\": 0.014400296429225598,\n \"acc_norm\": 0.24581005586592178,\n\
\ \"acc_norm_stderr\": 0.014400296429225598\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.2647058823529412,\n \"acc_stderr\": 0.025261691219729484,\n\
\ \"acc_norm\": 0.2647058823529412,\n \"acc_norm_stderr\": 0.025261691219729484\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.29260450160771706,\n\
\ \"acc_stderr\": 0.025839898334877983,\n \"acc_norm\": 0.29260450160771706,\n\
\ \"acc_norm_stderr\": 0.025839898334877983\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.25308641975308643,\n \"acc_stderr\": 0.024191808600712992,\n\
\ \"acc_norm\": 0.25308641975308643,\n \"acc_norm_stderr\": 0.024191808600712992\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.2127659574468085,\n \"acc_stderr\": 0.024414612974307713,\n \
\ \"acc_norm\": 0.2127659574468085,\n \"acc_norm_stderr\": 0.024414612974307713\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.25554106910039115,\n\
\ \"acc_stderr\": 0.01113985783359853,\n \"acc_norm\": 0.25554106910039115,\n\
\ \"acc_norm_stderr\": 0.01113985783359853\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.1948529411764706,\n \"acc_stderr\": 0.02406059942348742,\n\
\ \"acc_norm\": 0.1948529411764706,\n \"acc_norm_stderr\": 0.02406059942348742\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.2565359477124183,\n \"acc_stderr\": 0.017667841612378984,\n \
\ \"acc_norm\": 0.2565359477124183,\n \"acc_norm_stderr\": 0.017667841612378984\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.2636363636363636,\n\
\ \"acc_stderr\": 0.04220224692971987,\n \"acc_norm\": 0.2636363636363636,\n\
\ \"acc_norm_stderr\": 0.04220224692971987\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.24081632653061225,\n \"acc_stderr\": 0.027372942201788163,\n\
\ \"acc_norm\": 0.24081632653061225,\n \"acc_norm_stderr\": 0.027372942201788163\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.23383084577114427,\n\
\ \"acc_stderr\": 0.029929415408348398,\n \"acc_norm\": 0.23383084577114427,\n\
\ \"acc_norm_stderr\": 0.029929415408348398\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036846,\n \
\ \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n\
\ \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.2891566265060241,\n\
\ \"acc_stderr\": 0.035294868015111155,\n \"acc_norm\": 0.2891566265060241,\n\
\ \"acc_norm_stderr\": 0.035294868015111155\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.19883040935672514,\n \"acc_stderr\": 0.030611116557432528,\n\
\ \"acc_norm\": 0.19883040935672514,\n \"acc_norm_stderr\": 0.030611116557432528\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.23623011015911874,\n\
\ \"mc1_stderr\": 0.014869755015871086,\n \"mc2\": 0.4895265981417741,\n\
\ \"mc2_stderr\": 0.016899411744816118\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.48855564325177586,\n \"acc_stderr\": 0.014048804199859329\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\"\
: 0.0\n }\n}\n```"
repo_url: https://huggingface.co/allknowingroger/FrankenLong-15B-passthrough
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|arc:challenge|25_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|gsm8k|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hellaswag|10_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-04-11T08-23-35.335626.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-management|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-virology|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|truthfulqa:mc|0_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-04-11T08-23-35.335626.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- '**/details_harness|winogrande|5_2024-04-11T08-23-35.335626.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-04-11T08-23-35.335626.parquet'
- config_name: results
data_files:
- split: 2024_04_11T08_23_35.335626
path:
- results_2024-04-11T08-23-35.335626.parquet
- split: latest
path:
- results_2024-04-11T08-23-35.335626.parquet
---
# Dataset Card for Evaluation run of allknowingroger/FrankenLong-15B-passthrough
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [allknowingroger/FrankenLong-15B-passthrough](https://huggingface.co/allknowingroger/FrankenLong-15B-passthrough) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_allknowingroger__FrankenLong-15B-passthrough",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2024-04-11T08:23:35.335626](https://huggingface.co/datasets/open-llm-leaderboard/details_allknowingroger__FrankenLong-15B-passthrough/blob/main/results_2024-04-11T08-23-35.335626.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.24540239366183794,
"acc_stderr": 0.030571557769658596,
"acc_norm": 0.24658088053054075,
"acc_norm_stderr": 0.03138624578542628,
"mc1": 0.23623011015911874,
"mc1_stderr": 0.014869755015871086,
"mc2": 0.4895265981417741,
"mc2_stderr": 0.016899411744816118
},
"harness|arc:challenge|25": {
"acc": 0.23293515358361774,
"acc_stderr": 0.012352507042617386,
"acc_norm": 0.2909556313993174,
"acc_norm_stderr": 0.01327307786590758
},
"harness|hellaswag|10": {
"acc": 0.25124477195777734,
"acc_stderr": 0.00432842570099869,
"acc_norm": 0.2605058753236407,
"acc_norm_stderr": 0.004380136468543945
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.3111111111111111,
"acc_stderr": 0.039992628766177214,
"acc_norm": 0.3111111111111111,
"acc_norm_stderr": 0.039992628766177214
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.19736842105263158,
"acc_stderr": 0.03238981601699397,
"acc_norm": 0.19736842105263158,
"acc_norm_stderr": 0.03238981601699397
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.22641509433962265,
"acc_stderr": 0.025757559893106737,
"acc_norm": 0.22641509433962265,
"acc_norm_stderr": 0.025757559893106737
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.19444444444444445,
"acc_stderr": 0.03309615177059004,
"acc_norm": 0.19444444444444445,
"acc_norm_stderr": 0.03309615177059004
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909283,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909283
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.23,
"acc_stderr": 0.04229525846816506,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816506
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.29,
"acc_stderr": 0.04560480215720683,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720683
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.23121387283236994,
"acc_stderr": 0.0321473730202947,
"acc_norm": 0.23121387283236994,
"acc_norm_stderr": 0.0321473730202947
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.041583075330832865,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.041583075330832865
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.26,
"acc_stderr": 0.0440844002276808,
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.28085106382978725,
"acc_stderr": 0.02937917046412481,
"acc_norm": 0.28085106382978725,
"acc_norm_stderr": 0.02937917046412481
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.21052631578947367,
"acc_stderr": 0.0383515395439942,
"acc_norm": 0.21052631578947367,
"acc_norm_stderr": 0.0383515395439942
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.2689655172413793,
"acc_stderr": 0.036951833116502325,
"acc_norm": 0.2689655172413793,
"acc_norm_stderr": 0.036951833116502325
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.2222222222222222,
"acc_stderr": 0.021411684393694196,
"acc_norm": 0.2222222222222222,
"acc_norm_stderr": 0.021411684393694196
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.3412698412698413,
"acc_stderr": 0.04240799327574924,
"acc_norm": 0.3412698412698413,
"acc_norm_stderr": 0.04240799327574924
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.29,
"acc_stderr": 0.04560480215720684,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720684
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.2161290322580645,
"acc_stderr": 0.02341529343356853,
"acc_norm": 0.2161290322580645,
"acc_norm_stderr": 0.02341529343356853
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.18226600985221675,
"acc_stderr": 0.02716334085964515,
"acc_norm": 0.18226600985221675,
"acc_norm_stderr": 0.02716334085964515
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.23,
"acc_stderr": 0.04229525846816506,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816506
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.21818181818181817,
"acc_stderr": 0.03225078108306289,
"acc_norm": 0.21818181818181817,
"acc_norm_stderr": 0.03225078108306289
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.2676767676767677,
"acc_stderr": 0.03154449888270286,
"acc_norm": 0.2676767676767677,
"acc_norm_stderr": 0.03154449888270286
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.18652849740932642,
"acc_stderr": 0.028112091210117457,
"acc_norm": 0.18652849740932642,
"acc_norm_stderr": 0.028112091210117457
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.2128205128205128,
"acc_stderr": 0.020752423722128023,
"acc_norm": 0.2128205128205128,
"acc_norm_stderr": 0.020752423722128023
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.2851851851851852,
"acc_stderr": 0.027528599210340492,
"acc_norm": 0.2851851851851852,
"acc_norm_stderr": 0.027528599210340492
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.20168067226890757,
"acc_stderr": 0.026064313406304527,
"acc_norm": 0.20168067226890757,
"acc_norm_stderr": 0.026064313406304527
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.23178807947019867,
"acc_stderr": 0.03445406271987053,
"acc_norm": 0.23178807947019867,
"acc_norm_stderr": 0.03445406271987053
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.20917431192660552,
"acc_stderr": 0.017437937173343222,
"acc_norm": 0.20917431192660552,
"acc_norm_stderr": 0.017437937173343222
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.24074074074074073,
"acc_stderr": 0.029157522184605617,
"acc_norm": 0.24074074074074073,
"acc_norm_stderr": 0.029157522184605617
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.2549019607843137,
"acc_stderr": 0.030587591351604243,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.030587591351604243
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.25316455696202533,
"acc_stderr": 0.02830465794303531,
"acc_norm": 0.25316455696202533,
"acc_norm_stderr": 0.02830465794303531
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.27802690582959644,
"acc_stderr": 0.030069584874494033,
"acc_norm": 0.27802690582959644,
"acc_norm_stderr": 0.030069584874494033
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.21374045801526717,
"acc_stderr": 0.035954616117746904,
"acc_norm": 0.21374045801526717,
"acc_norm_stderr": 0.035954616117746904
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.2644628099173554,
"acc_stderr": 0.04026187527591205,
"acc_norm": 0.2644628099173554,
"acc_norm_stderr": 0.04026187527591205
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.3055555555555556,
"acc_stderr": 0.04453197507374984,
"acc_norm": 0.3055555555555556,
"acc_norm_stderr": 0.04453197507374984
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.24539877300613497,
"acc_stderr": 0.03380939813943354,
"acc_norm": 0.24539877300613497,
"acc_norm_stderr": 0.03380939813943354
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.2767857142857143,
"acc_stderr": 0.04246624336697624,
"acc_norm": 0.2767857142857143,
"acc_norm_stderr": 0.04246624336697624
},
"harness|hendrycksTest-management|5": {
"acc": 0.22330097087378642,
"acc_stderr": 0.04123553189891431,
"acc_norm": 0.22330097087378642,
"acc_norm_stderr": 0.04123553189891431
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.25213675213675213,
"acc_stderr": 0.02844796547623102,
"acc_norm": 0.25213675213675213,
"acc_norm_stderr": 0.02844796547623102
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.23,
"acc_stderr": 0.04229525846816506,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816506
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.28991060025542786,
"acc_stderr": 0.01622501794477096,
"acc_norm": 0.28991060025542786,
"acc_norm_stderr": 0.01622501794477096
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.20520231213872833,
"acc_stderr": 0.021742519835276287,
"acc_norm": 0.20520231213872833,
"acc_norm_stderr": 0.021742519835276287
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.24581005586592178,
"acc_stderr": 0.014400296429225598,
"acc_norm": 0.24581005586592178,
"acc_norm_stderr": 0.014400296429225598
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.2647058823529412,
"acc_stderr": 0.025261691219729484,
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.025261691219729484
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.29260450160771706,
"acc_stderr": 0.025839898334877983,
"acc_norm": 0.29260450160771706,
"acc_norm_stderr": 0.025839898334877983
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.25308641975308643,
"acc_stderr": 0.024191808600712992,
"acc_norm": 0.25308641975308643,
"acc_norm_stderr": 0.024191808600712992
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.2127659574468085,
"acc_stderr": 0.024414612974307713,
"acc_norm": 0.2127659574468085,
"acc_norm_stderr": 0.024414612974307713
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.25554106910039115,
"acc_stderr": 0.01113985783359853,
"acc_norm": 0.25554106910039115,
"acc_norm_stderr": 0.01113985783359853
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.1948529411764706,
"acc_stderr": 0.02406059942348742,
"acc_norm": 0.1948529411764706,
"acc_norm_stderr": 0.02406059942348742
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.2565359477124183,
"acc_stderr": 0.017667841612378984,
"acc_norm": 0.2565359477124183,
"acc_norm_stderr": 0.017667841612378984
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.2636363636363636,
"acc_stderr": 0.04220224692971987,
"acc_norm": 0.2636363636363636,
"acc_norm_stderr": 0.04220224692971987
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.24081632653061225,
"acc_stderr": 0.027372942201788163,
"acc_norm": 0.24081632653061225,
"acc_norm_stderr": 0.027372942201788163
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.23383084577114427,
"acc_stderr": 0.029929415408348398,
"acc_norm": 0.23383084577114427,
"acc_norm_stderr": 0.029929415408348398
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.2,
"acc_stderr": 0.04020151261036846,
"acc_norm": 0.2,
"acc_norm_stderr": 0.04020151261036846
},
"harness|hendrycksTest-virology|5": {
"acc": 0.2891566265060241,
"acc_stderr": 0.035294868015111155,
"acc_norm": 0.2891566265060241,
"acc_norm_stderr": 0.035294868015111155
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.19883040935672514,
"acc_stderr": 0.030611116557432528,
"acc_norm": 0.19883040935672514,
"acc_norm_stderr": 0.030611116557432528
},
"harness|truthfulqa:mc|0": {
"mc1": 0.23623011015911874,
"mc1_stderr": 0.014869755015871086,
"mc2": 0.4895265981417741,
"mc2_stderr": 0.016899411744816118
},
"harness|winogrande|5": {
"acc": 0.48855564325177586,
"acc_stderr": 0.014048804199859329
},
"harness|gsm8k|5": {
"acc": 0.0,
"acc_stderr": 0.0
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
FreedomIntelligence/EXAMs | ---
language:
- ar
task_categories:
- multiple-choice
size_categories:
- n<1K
viewer: true
license: apache-2.0
---
# EXAMs
You can find details of the dataset in this paper: https://arxiv.org/pdf/2308.16149.pdf
## About this Arabic dataset
We only took the Arabic part of the dataset, which contains 562 examples.
We then extracted five examples from each category, based on the task domain, to serve as few-shot data.
botbot-ai/PortugueseDolly | ---
license: other
language:
- pt
pretty_name: Portuguese Dolly 15k
size_categories:
- 10K<n<100K
---
PortugueseDolly é uma tradução do [Databricks Dolly 15k]( https://huggingface.co/datasets/databricks/databricks-dolly-15k) para português brasileiro (pt-br) utilizando o nllb 3.3b.
*Somente para demonstração e pesquisa. Proibido para uso comercial.
- - - - - - - - - - - - - - - - - - - - - - - - - - -
PortugueseDolly is a translation of the [Databricks Dolly 15k]( https://huggingface.co/datasets/databricks/databricks-dolly-15k) into Brazilian Portuguese (pt-br) using GPT3.5 Turbo.
*For demonstration and research purposes only. Commercial use prohibited.
|
yiyic/mtg_en | ---
language:
- en
--- |
datahrvoje/twitter_dataset_1713084588 | ---
dataset_info:
features:
- name: id
dtype: string
- name: tweet_content
dtype: string
- name: user_name
dtype: string
- name: user_id
dtype: string
- name: created_at
dtype: string
- name: url
dtype: string
- name: favourite_count
dtype: int64
- name: scraped_at
dtype: string
- name: image_urls
dtype: string
splits:
- name: train
num_bytes: 14581
num_examples: 35
download_size: 10422
dataset_size: 14581
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
mchen72/my-test-dataset | ---
dataset_info:
features:
- name: labels
dtype: int64
- name: text
dtype: string
splits:
- name: train
num_bytes: 16847665.2
num_examples: 90000
- name: test
num_bytes: 1871962.8
num_examples: 10000
download_size: 11140374
dataset_size: 18719628.0
---
# Dataset Card for "my-test-dataset"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
results-sd-v1-5-sd-v2-1-if-v1-0-karlo/2930d131 | ---
dataset_info:
features:
- name: result
dtype: string
- name: id
dtype: int64
splits:
- name: train
num_bytes: 186
num_examples: 10
download_size: 1331
dataset_size: 186
---
# Dataset Card for "2930d131"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Nexdata/10075_People_Multi_race_and_Multi_pose_Face_Images_Data | ---
license: cc-by-nc-nd-4.0
---
## Description
10,075 people - face images dataset includes people collected from many countries. Multiple photos of each person's daily life are collected, and the gender, race, age, etc. of the person being collected are marked. This Dataset provides a rich resource for artificial intelligence applications. It has been validated by multiple AI companies and proves beneficial for achieving outstanding performance in real-world applications. Throughout the process of Dataset collection, storage, and usage, we have consistently adhered to Dataset protection and privacy regulations to ensure the preservation of user privacy and legal rights. All Dataset comply with regulations such as GDPR, CCPA, PIPL, and other applicable laws.
For more details, please refer to the link: https://www.nexdata.ai/dataset/1402?source=Huggingface
## Data size
10,075 people, no less than 30 images per person
## Race distribution
3,542 black people, 3,531 Indian people and 3,002 Asian people
## Gender distribution
4,997 males, 5,078 females
## Age distribution
most people are young aged, the middle-aged and the elderly cover a small portion
## Collecting environment
including indoor and outdoor scenes
## Data diversity
different face poses, races, accessories, ages, light conditions and scenes
## Data format
.jpg, .png
# Licensing Information
Commercial License
|
0-hero/Matter-0.1-Slim-A | ---
license: apache-2.0
---
Subset A of [Matter-0.1](https://huggingface.co/datasets/0-hero/Matter-0.1) <br>
Datasets have been deduped, decontaminated with the [bagel script from Jon Durbin](https://github.com/jondurbin/bagel/blob/main/bagel/data_sources/__init__.py) |
jscotthorn/krs-structured | ---
license: apache-2.0
---
|
malaysia-ai/mosaic-embedding-pairs | ---
language:
- ms
---
# Mosaic format for embedding task text pair dataset
This repository is to store dataset shards using mosaic format.
1. prepared at https://github.com/mesolitica/llama2-embedding/blob/main/notebooks/combine-embedding.ipynb
## how-to
1. git clone,
```bash
git lfs clone https://huggingface.co/datasets/malaysia-ai/mosaic-embedding-pairs
```
2. load it,
```python
from streaming import LocalDataset
from streaming.base.format.mds.encodings import Encoding, _encodings
import json
class ListStr(Encoding):
def encode(self, obj):
return json.dumps(obj).encode()
def decode(self, data):
return json.loads(data)
_encodings['liststr'] = ListStr
dataset = LocalDataset('mosaic-embedding-pairs')
len(dataset)
``` |
ebisuke/liz-nojaloli-ja-ds | ---
license: mit
language:
- ja
---
# ebisuke/liz-nojaloli-ja-ds
## License
[MIT License](https://opensource.org/licenses/MIT)
## Description
[ebisuke/liz-nojaloli-ja](https://huggingface.co/ebisuke/liz-nojaloli-ja)の学習元のデータセットです。
私(ebisuke)の手持ちのデータセットになります。
pythonのコードについては[qiita](https://qiita.com/)を参照している場合があります。
## Plan
- RLHF用のデータセットの準備をしてみたい
sh110495/compressed_gsm8k | ---
dataset_info:
features:
- name: id
dtype: string
- name: input_ids
sequence: int32
- name: attention_mask
sequence: int8
- name: labels
dtype: string
splits:
- name: test
num_bytes: 3282072
num_examples: 1319
download_size: 1109705
dataset_size: 3282072
configs:
- config_name: default
data_files:
- split: test
path: data/test-*
---
|
CaioFelipe/Teste | ---
license: apache-2.0
---
|
CyberHarem/ethan_arknights | ---
license: mit
task_categories:
- text-to-image
tags:
- art
- not-for-all-audiences
size_categories:
- n<1K
---
# Dataset of ethan_arknights
This is the dataset of ethan_arknights, containing 42 images and their tags.
Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)).
| Name | Images | Download | Description |
|:------------|---------:|:------------------------------------|:-------------------------------------------------------------------------|
| raw | 42 | [Download](dataset-raw.zip) | Raw data with meta information. |
| raw-stage3 | 83 | [Download](dataset-raw-stage3.zip) | 3-stage cropped raw data with meta information. |
| 384x512 | 42 | [Download](dataset-384x512.zip) | 384x512 aligned dataset. |
| 512x512 | 42 | [Download](dataset-512x512.zip) | 512x512 aligned dataset. |
| 512x704 | 42 | [Download](dataset-512x704.zip) | 512x704 aligned dataset. |
| 640x640 | 42 | [Download](dataset-640x640.zip) | 640x640 aligned dataset. |
| 640x880 | 42 | [Download](dataset-640x880.zip) | 640x880 aligned dataset. |
| stage3-640 | 83 | [Download](dataset-stage3-640.zip) | 3-stage cropped dataset with the shorter side not exceeding 640 pixels. |
| stage3-800 | 83 | [Download](dataset-stage3-800.zip) | 3-stage cropped dataset with the shorter side not exceeding 800 pixels. |
| stage3-1200 | 83 | [Download](dataset-stage3-1200.zip) | 3-stage cropped dataset with the shorter side not exceeding 1200 pixels. |
|
kheder/dataset_hadith | ---
dataset_info:
features:
- name: id
dtype: string
- name: hadith_id
dtype: string
- name: source
dtype: string
- name: chapter_no
dtype: string
- name: hadith_no
dtype: string
- name: chapter
dtype: string
- name: chain_indx
dtype: string
- name: text_ar
dtype: string
- name: text_en
dtype: string
splits:
- name: train
num_bytes: 41709856
num_examples: 34441
download_size: 0
dataset_size: 41709856
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "dataset_hadith"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
sasha/australian_sea_slugs | ---
dataset_info:
features:
- name: url
dtype: string
- name: image
dtype: image
- name: label
dtype: string
splits:
- name: train
num_bytes: 86677304.65602817
num_examples: 2107
download_size: 87406259
dataset_size: 86677304.65602817
---
# Dataset Card for "australian_sea_slugs"
This is a filtered version of the [Nudibranchs of the Sunshine Coast Australia](https://www.gbif.org/dataset/ee412fa2-edc9-4c6b-91f3-ff2a02c245e0) dataset.
## Citation
```
Atlas of Living Australia (2019). Nudibranchs of the Sunshine Coast Australia. Occurrence dataset https://doi.org/10.15468/gtoiks accessed via GBIF.org on 2022-12-16.
``` |
ignmilton/ign_clean_instruct_dataset_500k | ---
license: apache-2.0
task_categories:
- question-answering
- conversational
language:
- en
tags:
- code
pretty_name: ign_500k
size_categories:
- 100K<n<1M
---
This dataset contains ~508k prompt-instruction pairs with high quality responses. It was synthetically created from a subset of Ultrachat prompts. It does not contain any alignment focused responses or NSFW content.
Licensed under apache-2.0 |
KatMarie/eu_test6 | ---
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 2429668
num_examples: 41376
download_size: 1661037
dataset_size: 2429668
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "eu_test6"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
salmonhumorous/logo-blip-caption | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 24808769.89
num_examples: 1435
download_size: 24242906
dataset_size: 24808769.89
---
# Dataset Card for "logo-blip"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
zolak/twitter_dataset_79_1713061596 | ---
dataset_info:
features:
- name: id
dtype: string
- name: tweet_content
dtype: string
- name: user_name
dtype: string
- name: user_id
dtype: string
- name: created_at
dtype: string
- name: url
dtype: string
- name: favourite_count
dtype: int64
- name: scraped_at
dtype: string
- name: image_urls
dtype: string
splits:
- name: train
num_bytes: 3310730
num_examples: 8069
download_size: 1653771
dataset_size: 3310730
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
anan-2024/twitter_dataset_1713193559 | ---
dataset_info:
features:
- name: id
dtype: string
- name: tweet_content
dtype: string
- name: user_name
dtype: string
- name: user_id
dtype: string
- name: created_at
dtype: string
- name: url
dtype: string
- name: favourite_count
dtype: int64
- name: scraped_at
dtype: string
- name: image_urls
dtype: string
splits:
- name: train
num_bytes: 135798
num_examples: 363
download_size: 76517
dataset_size: 135798
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Hennara/ammlu | ---
task_categories:
- question-answering
language:
- ar
size_categories:
- 10K<n<100K
---
# Dataset Card for Dataset Name
Arabic MMLU: Measuring massive multitask language understanding in Arabic
This dataset has been translated from the original MMLU with the help of GPT-4.
The original data paper [MMLU](https://arxiv.org/pdf/2009.03300v3.pdf)
The MMLU dataset on huggingface: [MMLU](https://huggingface.co/datasets/cais/mmlu)
### Dataset Sources [optional]
The translation and re-generation have been done by AceGPT researchers [AceGPT](https://arxiv.org/abs/2309.12053)
- [**Repository:**](https://github.com/FreedomIntelligence/AceGPT/tree/main/eval/benchmark_eval/benchmarks/MMLUArabic)
- [**Paper**](https://arxiv.org/abs/2309.12053)
## Uses
Arabic-MMLU is a comprehensive evaluation benchmark specifically designed to evaluate the knowledge and reasoning abilities of LLMs within the context of Arabic language and culture.
Arabic-MMLU covers a wide range of subjects, comprising 57 topics that span from elementary to advanced professional levels.
### Direct Use
This dataset is available to be used directly with [datasets](https://github.com/huggingface/datasets) from huggingface, and is also available for use with the [lm-eval](https://github.com/EleutherAI/lm-evaluation-harness) framework.
## Dataset Structure
The dataset consist of 57 subject, divided into 4 category.
| Subject Area | STEM | Humanities | Social Sciences | Other |
|---|---|---|---|---|
| abstract_algebra | ✓ | | | |
| anatomy | ✓ | | | |
| astronomy | ✓ | | | |
| business_ethics | | | | ✓ |
| clinical_knowledge | | | | ✓ |
| college_biology | ✓ | | | |
| college_chemistry | ✓ | | | |
| college_computer_science | ✓ | | | |
| college_mathematics | ✓ | | | |
| college_medicine | | | | ✓ |
| college_physics | ✓ | | | |
| computer_security | ✓ | | | |
| conceptual_physics | ✓ | | | |
| econometrics | | | ✓ | |
| electrical_engineering | ✓ | | | |
| elementary_mathematics | ✓ | | | |
| formal_logic | | ✓ | | |
| global_facts | | | | ✓ |
| high_school_biology | ✓ | | | |
| high_school_chemistry | ✓ | | | |
| high_school_computer_science | ✓ | | | |
| high_school_european_history | | ✓ | | |
| high_school_geography | | | ✓ | |
| high_school_government_and_politics | | | ✓ | |
| high_school_macroeconomics | | | ✓ | |
| high_school_mathematics | ✓ | | | |
| high_school_microeconomics | | | ✓ | |
| high_school_physics | ✓ | | | |
| high_school_psychology | | | ✓ | |
| high_school_statistics | ✓ | | | |
| high_school_us_history | | ✓ | | |
| high_school_world_history | | ✓ | | |
| human_aging | | | | ✓ |
| human_sexuality | | | ✓ | |
| international_law | | ✓ | | |
| jurisprudence | | ✓ | | |
| logical_fallacies | | ✓ | | |
| machine_learning | ✓ | | | |
| management | | | | ✓ |
| marketing | | | | ✓ |
| medical_genetics | | | | ✓ |
| miscellaneous | | | | ✓ |
| moral_disputes | | ✓ | | |
| moral_scenarios | | ✓ | | |
| nutrition | | | | ✓ |
| philosophy | | ✓ | | |
| prehistory | | ✓ | | |
| professional_accounting | | | | ✓ |
| professional_law | | ✓ | | |
| professional_medicine | | | | ✓ |
| professional_psychology | | | ✓ | |
| public_relations | | | ✓ | |
| security_studies | | | ✓ | |
| sociology | | | ✓ | |
| us_foreign_policy | | | ✓ | |
| virology | | | | ✓ |
| world_religions | | ✓ | | |
| - | - | - | - | - |
each item of the dataset is a dictionary with **Question, A, B, C, D, Answer** where A,B,C,D are options to the choose from.
here is three example from the abstract algebra subject.
| Question | A | B | C | D | Answer |
|---|---|---|---|---|---|
| ู
ุฌู
ูุนุฉ ูุฑุนูุฉ H ู
ู ู
ุฌู
ูุนุฉ (Gุ*) ูู ู
ุฌู
ูุนุฉ ุฅุฐุง | 'aุ b ูู H => a * b ูู H' | 'a ูู H => a^-1 ูู H' | 'aุ b ูู H => a * b^-1 ูู H' | 'H ูุญุชูู ุนูู ุงูุนูุตุฑ ุงูู
ุญุฏุฏ' | C |
| 'ู
ุง ูู ุชุฑุชูุจ ุงูุนูุตุฑ (4ุ 2) ู
ู Z_12 x Z_8' | 2 | 4 | 8 | 12 | C |
|ู
ุง ูู ุงูุฏุฑุฌุฉ ูุชู
ุฏูุฏ ุงูุญูู ุงูู
ุนุทู Q(sqrt(2) + sqrt(3)) ุนูู Q| 0 | 4 | 2 | 6| B |
The size of each subject within the dataset
| Subject | Test Length | Eval Length |
|---|---|---|
| professional_law | 1534 | 5 |
| moral_scenarios | 895 | 5 |
| miscellaneous | 783 | 5 |
| professional_psychology | 612 | 5 |
| high_school_psychology | 545 | 5 |
| high_school_macroeconomics | 390 | 5 |
| elementary_mathematics | 378 | 5 |
| moral_disputes | 346 | 5 |
| prehistory | 324 | 5 |
| philosophy | 311 | 5 |
| high_school_biology | 310 | 5 |
| nutrition | 306 | 5 |
| professional_accounting | 282 | 5 |
| professional_medicine | 272 | 5 |
| high_school_mathematics | 270 | 5 |
| clinical_knowledge | 265 | 5 |
| security_studies | 245 | 5 |
| high_school_microeconomics | 238 | 5 |
| high_school_world_history | 237 | 5 |
| conceptual_physics | 235 | 5 |
| marketing | 234 | 5 |
| human_aging | 223 | 5 |
| high_school_statistics | 216 | 5 |
| high_school_us_history | 204 | 5 |
| high_school_chemistry | 203 | 5 |
| sociology | 201 | 5 |
| high_school_geography | 198 | 5 |
| high_school_government_and_politics | 193 | 5 |
| college_medicine | 173 | 5 |
| world_religions | 171 | 5 |
| virology | 166 | 5 |
| high_school_european_history | 165 | 5 |
| logical_fallacies | 163 | 5 |
| astronomy | 152 | 5 |
| high_school_physics | 151 | 5 |
| electrical_engineering | 145 | 5 |
| college_biology | 144 | 5 |
| anatomy | 135 | 5 |
| human_sexuality | 131 | 5 |
| formal_logic | 126 | 5 |
| international_law | 121 | 5 |
| econometrics | 114 | 5 |
| machine_learning | 112 | 5 |
| public_relations | 110 | 5 |
| jurisprudence | 108 | 5 |
| management | 103 | 5 |
| college_physics | 102 | 5 |
| abstract_algebra | 100 | 5 |
| business_ethics | 100 | 5 |
| college_chemistry | 100 | 5 |
| college_computer_science | 100 | 5 |
| college_mathematics | 100 | 5 |
| computer_security | 100 | 5 |
| global_facts | 100 | 5 |
| high_school_computer_science | 100 | 5 |
| medical_genetics | 100 | 5 |
| us_foreign_policy | 100 | 5 |
| count | 14042 | 285 | |
ashwathjadhav23/Spanish_MLM_5 | ---
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 3567673
num_examples: 25000
download_size: 1978049
dataset_size: 3567673
---
# Dataset Card for "Spanish_MLM_5"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
mask-distilled-onesec-cv12-each-chunk-uniq/chunk_161 | ---
dataset_info:
features:
- name: logits
sequence: float32
- name: mfcc
sequence:
sequence: float64
splits:
- name: train
num_bytes: 1150705436.0
num_examples: 225983
download_size: 1174986963
dataset_size: 1150705436.0
---
# Dataset Card for "chunk_161"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
gguichard/wsd_myriade_synth_data_gpt4turbo_with_lemma | ---
dataset_info:
features:
- name: tokens
sequence: string
- name: wn_sens
sequence: int64
- name: input_ids
sequence: int32
- name: attention_mask
sequence: int8
- name: labels
sequence: int64
splits:
- name: train
num_bytes: 2400979
num_examples: 3391
download_size: 472673
dataset_size: 2400979
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "wsd_myriade_synth_data_gpt4turbo_with_lemma"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
omarmus/data | ---
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 23844
num_examples: 50
download_size: 15094
dataset_size: 23844
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
license: apache-2.0
task_categories:
- question-answering
language:
- es
tags:
- legal
pretty_name: a
size_categories:
- n<1K
---
Datos para el entrenamiento de un chatbot. |
Vaibhav9401/llama_spam | ---
license: apache-2.0
---
|
ericrisco/ragas-eval-dataset | ---
dataset_info:
features:
- name: question
dtype: string
- name: context
dtype: string
- name: ground_truth
dtype: string
- name: metadata
struct:
- name: Authors
dtype: string
- name: Published
dtype: string
- name: Summary
dtype: string
- name: Title
dtype: string
splits:
- name: train
num_bytes: 6832
num_examples: 5
download_size: 19624
dataset_size: 6832
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Pav17/T3-gen-dataset | ---
dataset_info:
features:
- name: task_id
dtype: int32
- name: text
dtype: string
- name: code
dtype: string
- name: test_list
sequence: string
- name: test_setup_code
dtype: string
- name: challenge_test_list
sequence: string
- name: input
dtype: string
splits:
- name: train
num_bytes: 377899
num_examples: 374
- name: test
num_bytes: 519921
num_examples: 500
- name: validation
num_bytes: 90750
num_examples: 90
- name: prompt
num_bytes: 9760
num_examples: 10
download_size: 459451
dataset_size: 998330
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
- split: validation
path: data/validation-*
- split: prompt
path: data/prompt-*
---
|
fia24/banel_wit_postag_v0.1.2.3.4 | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
- split: val
path: data/val-*
dataset_info:
features:
- name: Inflected_Word
dtype: string
- name: Lemma
dtype: string
- name: POS
dtype: string
splits:
- name: train
num_bytes: 1237478.719008634
num_examples: 17882
- name: test
num_bytes: 154736.74173489018
num_examples: 2236
- name: val
num_bytes: 154667.53925647563
num_examples: 2235
download_size: 521864
dataset_size: 1546883.0
---
# Dataset Card for "banel_wit_postag_v0.1.2.3.4"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
snats/chico | ---
license: cc-by-4.0
---
|
luna-code/sfepy | ---
dataset_info:
features:
- name: prompt
dtype: string
- name: completion
dtype: string
- name: api
dtype: string
splits:
- name: train
num_bytes: 11902445
num_examples: 1364
- name: test
num_bytes: 585379
num_examples: 159
download_size: 2255941
dataset_size: 12487824
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
|
Xenova/quickdraw | ---
annotations_creators:
- machine-generated
language_creators:
- crowdsourced
language:
- en
license:
- cc-by-4.0
multilinguality:
- monolingual
size_categories:
- 10M<n<100M
source_datasets:
- original
task_categories:
- image-classification
task_ids:
- multi-class-image-classification
paperswithcode_id: quick-draw-dataset
pretty_name: Quick, Draw!
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': aircraft carrier
'1': airplane
'2': alarm clock
'3': ambulance
'4': angel
'5': animal migration
'6': ant
'7': anvil
'8': apple
'9': arm
'10': asparagus
'11': axe
'12': backpack
'13': banana
'14': bandage
'15': barn
'16': baseball bat
'17': baseball
'18': basket
'19': basketball
'20': bat
'21': bathtub
'22': beach
'23': bear
'24': beard
'25': bed
'26': bee
'27': belt
'28': bench
'29': bicycle
'30': binoculars
'31': bird
'32': birthday cake
'33': blackberry
'34': blueberry
'35': book
'36': boomerang
'37': bottlecap
'38': bowtie
'39': bracelet
'40': brain
'41': bread
'42': bridge
'43': broccoli
'44': broom
'45': bucket
'46': bulldozer
'47': bus
'48': bush
'49': butterfly
'50': cactus
'51': cake
'52': calculator
'53': calendar
'54': camel
'55': camera
'56': camouflage
'57': campfire
'58': candle
'59': cannon
'60': canoe
'61': car
'62': carrot
'63': castle
'64': cat
'65': ceiling fan
'66': cell phone
'67': cello
'68': chair
'69': chandelier
'70': church
'71': circle
'72': clarinet
'73': clock
'74': cloud
'75': coffee cup
'76': compass
'77': computer
'78': cookie
'79': cooler
'80': couch
'81': cow
'82': crab
'83': crayon
'84': crocodile
'85': crown
'86': cruise ship
'87': cup
'88': diamond
'89': dishwasher
'90': diving board
'91': dog
'92': dolphin
'93': donut
'94': door
'95': dragon
'96': dresser
'97': drill
'98': drums
'99': duck
'100': dumbbell
'101': ear
'102': elbow
'103': elephant
'104': envelope
'105': eraser
'106': eye
'107': eyeglasses
'108': face
'109': fan
'110': feather
'111': fence
'112': finger
'113': fire hydrant
'114': fireplace
'115': firetruck
'116': fish
'117': flamingo
'118': flashlight
'119': flip flops
'120': floor lamp
'121': flower
'122': flying saucer
'123': foot
'124': fork
'125': frog
'126': frying pan
'127': garden hose
'128': garden
'129': giraffe
'130': goatee
'131': golf club
'132': grapes
'133': grass
'134': guitar
'135': hamburger
'136': hammer
'137': hand
'138': harp
'139': hat
'140': headphones
'141': hedgehog
'142': helicopter
'143': helmet
'144': hexagon
'145': hockey puck
'146': hockey stick
'147': horse
'148': hospital
'149': hot air balloon
'150': hot dog
'151': hot tub
'152': hourglass
'153': house plant
'154': house
'155': hurricane
'156': ice cream
'157': jacket
'158': jail
'159': kangaroo
'160': key
'161': keyboard
'162': knee
'163': knife
'164': ladder
'165': lantern
'166': laptop
'167': leaf
'168': leg
'169': light bulb
'170': lighter
'171': lighthouse
'172': lightning
'173': line
'174': lion
'175': lipstick
'176': lobster
'177': lollipop
'178': mailbox
'179': map
'180': marker
'181': matches
'182': megaphone
'183': mermaid
'184': microphone
'185': microwave
'186': monkey
'187': moon
'188': mosquito
'189': motorbike
'190': mountain
'191': mouse
'192': moustache
'193': mouth
'194': mug
'195': mushroom
'196': nail
'197': necklace
'198': nose
'199': ocean
'200': octagon
'201': octopus
'202': onion
'203': oven
'204': owl
'205': paint can
'206': paintbrush
'207': palm tree
'208': panda
'209': pants
'210': paper clip
'211': parachute
'212': parrot
'213': passport
'214': peanut
'215': pear
'216': peas
'217': pencil
'218': penguin
'219': piano
'220': pickup truck
'221': picture frame
'222': pig
'223': pillow
'224': pineapple
'225': pizza
'226': pliers
'227': police car
'228': pond
'229': pool
'230': popsicle
'231': postcard
'232': potato
'233': power outlet
'234': purse
'235': rabbit
'236': raccoon
'237': radio
'238': rain
'239': rainbow
'240': rake
'241': remote control
'242': rhinoceros
'243': rifle
'244': river
'245': roller coaster
'246': rollerskates
'247': sailboat
'248': sandwich
'249': saw
'250': saxophone
'251': school bus
'252': scissors
'253': scorpion
'254': screwdriver
'255': sea turtle
'256': see saw
'257': shark
'258': sheep
'259': shoe
'260': shorts
'261': shovel
'262': sink
'263': skateboard
'264': skull
'265': skyscraper
'266': sleeping bag
'267': smiley face
'268': snail
'269': snake
'270': snorkel
'271': snowflake
'272': snowman
'273': soccer ball
'274': sock
'275': speedboat
'276': spider
'277': spoon
'278': spreadsheet
'279': square
'280': squiggle
'281': squirrel
'282': stairs
'283': star
'284': steak
'285': stereo
'286': stethoscope
'287': stitches
'288': stop sign
'289': stove
'290': strawberry
'291': streetlight
'292': string bean
'293': submarine
'294': suitcase
'295': sun
'296': swan
'297': sweater
'298': swing set
'299': sword
'300': syringe
'301': t-shirt
'302': table
'303': teapot
'304': teddy-bear
'305': telephone
'306': television
'307': tennis racquet
'308': tent
'309': The Eiffel Tower
'310': The Great Wall of China
'311': The Mona Lisa
'312': tiger
'313': toaster
'314': toe
'315': toilet
'316': tooth
'317': toothbrush
'318': toothpaste
'319': tornado
'320': tractor
'321': traffic light
'322': train
'323': tree
'324': triangle
'325': trombone
'326': truck
'327': trumpet
'328': umbrella
'329': underwear
'330': van
'331': vase
'332': violin
'333': washing machine
'334': watermelon
'335': waterslide
'336': whale
'337': wheel
'338': windmill
'339': wine bottle
'340': wine glass
'341': wristwatch
'342': yoga
'343': zebra
'344': zigzag
splits:
- name: train
num_bytes: 19761125464.75
num_examples: 50426266
download_size: 18927763475
dataset_size: 19761125464.75
---
# Dataset Card for Quick, Draw!
This is a processed version of Google's [Quick, Draw](https://huggingface.co/datasets/quickdraw/) dataset to be compatible with the latest versions of ๐ค Datasets that support .parquet files. NOTE: this dataset only contains the "preprocessed_bitmaps" subset of the original dataset.
|
pablouribe/speech2text_robustness | ---
dataset_info:
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: accent
dtype: string
- name: sentence
dtype: string
- name: language
dtype: string
- name: audio_phone
dtype:
audio:
sampling_rate: 16000
splits:
- name: train
num_bytes: 31420688.0
num_examples: 90
download_size: 27915339
dataset_size: 31420688.0
---
# Dataset Card for "speech2text_robustness"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
PatSeal/dataset1 | ---
license: apache-2.0
---
|
Back-up/health-100 | ---
dataset_info:
features:
- name: question
dtype: string
- name: options
list:
- name: answer
dtype: string
- name: key
dtype: string
- name: answer
dtype: string
- name: context
dtype: string
splits:
- name: train
num_bytes: 117374
num_examples: 103
download_size: 30084
dataset_size: 117374
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "health-100"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
open-llm-leaderboard/details_dillfrescott__trinity-medium | ---
pretty_name: Evaluation run of dillfrescott/trinity-medium
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [dillfrescott/trinity-medium](https://huggingface.co/dillfrescott/trinity-medium)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_dillfrescott__trinity-medium\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-12-29T19:31:05.351110](https://huggingface.co/datasets/open-llm-leaderboard/details_dillfrescott__trinity-medium/blob/main/results_2023-12-29T19-31-05.351110.json)(note\
\ that there might be results for other tasks in the repos if successive evals didn't
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6543513981322463,\n\
\ \"acc_stderr\": 0.03200522597754596,\n \"acc_norm\": 0.65517466197835,\n\
\ \"acc_norm_stderr\": 0.03265417586520206,\n \"mc1\": 0.5630354957160343,\n\
\ \"mc1_stderr\": 0.017363844503195957,\n \"mc2\": 0.6954134254414035,\n\
\ \"mc2_stderr\": 0.015047304382402624\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.6868600682593856,\n \"acc_stderr\": 0.013552671543623497,\n\
\ \"acc_norm\": 0.7150170648464164,\n \"acc_norm_stderr\": 0.013191348179838795\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6963752240589524,\n\
\ \"acc_stderr\": 0.004588827958775116,\n \"acc_norm\": 0.869946225851424,\n\
\ \"acc_norm_stderr\": 0.0033567515689037672\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \
\ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6666666666666666,\n\
\ \"acc_stderr\": 0.04072314811876837,\n \"acc_norm\": 0.6666666666666666,\n\
\ \"acc_norm_stderr\": 0.04072314811876837\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.6842105263157895,\n \"acc_stderr\": 0.0378272898086547,\n\
\ \"acc_norm\": 0.6842105263157895,\n \"acc_norm_stderr\": 0.0378272898086547\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.63,\n\
\ \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \
\ \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.7169811320754716,\n \"acc_stderr\": 0.027724236492700918,\n\
\ \"acc_norm\": 0.7169811320754716,\n \"acc_norm_stderr\": 0.027724236492700918\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n\
\ \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n\
\ \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \
\ \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.56,\n \"acc_stderr\": 0.049888765156985884,\n \"acc_norm\": 0.56,\n\
\ \"acc_norm_stderr\": 0.049888765156985884\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.27,\n \"acc_stderr\": 0.0446196043338474,\n \
\ \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n\
\ \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6820809248554913,\n\
\ \"acc_stderr\": 0.035506839891655796,\n \"acc_norm\": 0.6820809248554913,\n\
\ \"acc_norm_stderr\": 0.035506839891655796\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.46078431372549017,\n \"acc_stderr\": 0.04959859966384181,\n\
\ \"acc_norm\": 0.46078431372549017,\n \"acc_norm_stderr\": 0.04959859966384181\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.77,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.77,\n\
\ \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.5659574468085107,\n \"acc_stderr\": 0.03240038086792747,\n\
\ \"acc_norm\": 0.5659574468085107,\n \"acc_norm_stderr\": 0.03240038086792747\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5087719298245614,\n\
\ \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.5087719298245614,\n\
\ \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5793103448275863,\n \"acc_stderr\": 0.0411391498118926,\n\
\ \"acc_norm\": 0.5793103448275863,\n \"acc_norm_stderr\": 0.0411391498118926\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.4312169312169312,\n \"acc_stderr\": 0.02550648169813821,\n \"\
acc_norm\": 0.4312169312169312,\n \"acc_norm_stderr\": 0.02550648169813821\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.49206349206349204,\n\
\ \"acc_stderr\": 0.044715725362943486,\n \"acc_norm\": 0.49206349206349204,\n\
\ \"acc_norm_stderr\": 0.044715725362943486\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \
\ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\
: 0.8,\n \"acc_stderr\": 0.022755204959542946,\n \"acc_norm\": 0.8,\n\
\ \"acc_norm_stderr\": 0.022755204959542946\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.4975369458128079,\n \"acc_stderr\": 0.03517945038691063,\n\
\ \"acc_norm\": 0.4975369458128079,\n \"acc_norm_stderr\": 0.03517945038691063\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.73,\n \"acc_stderr\": 0.04461960433384739,\n \"acc_norm\"\
: 0.73,\n \"acc_norm_stderr\": 0.04461960433384739\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.03256866661681102,\n\
\ \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.03256866661681102\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.7929292929292929,\n \"acc_stderr\": 0.028869778460267045,\n \"\
acc_norm\": 0.7929292929292929,\n \"acc_norm_stderr\": 0.028869778460267045\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n\
\ \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.02199531196364424\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.6692307692307692,\n \"acc_stderr\": 0.023854795680971128,\n\
\ \"acc_norm\": 0.6692307692307692,\n \"acc_norm_stderr\": 0.023854795680971128\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.337037037037037,\n \"acc_stderr\": 0.028820884666253255,\n \
\ \"acc_norm\": 0.337037037037037,\n \"acc_norm_stderr\": 0.028820884666253255\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.6848739495798319,\n \"acc_stderr\": 0.030176808288974337,\n\
\ \"acc_norm\": 0.6848739495798319,\n \"acc_norm_stderr\": 0.030176808288974337\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"\
acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.8422018348623853,\n \"acc_stderr\": 0.01563002297009244,\n \"\
acc_norm\": 0.8422018348623853,\n \"acc_norm_stderr\": 0.01563002297009244\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.5370370370370371,\n \"acc_stderr\": 0.03400603625538272,\n \"\
acc_norm\": 0.5370370370370371,\n \"acc_norm_stderr\": 0.03400603625538272\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.8284313725490197,\n \"acc_stderr\": 0.026460569561240644,\n \"\
acc_norm\": 0.8284313725490197,\n \"acc_norm_stderr\": 0.026460569561240644\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.7974683544303798,\n \"acc_stderr\": 0.02616056824660146,\n \
\ \"acc_norm\": 0.7974683544303798,\n \"acc_norm_stderr\": 0.02616056824660146\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6816143497757847,\n\
\ \"acc_stderr\": 0.03126580522513713,\n \"acc_norm\": 0.6816143497757847,\n\
\ \"acc_norm_stderr\": 0.03126580522513713\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.8091603053435115,\n \"acc_stderr\": 0.03446513350752599,\n\
\ \"acc_norm\": 0.8091603053435115,\n \"acc_norm_stderr\": 0.03446513350752599\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"\
acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n\
\ \"acc_stderr\": 0.04077494709252627,\n \"acc_norm\": 0.7685185185185185,\n\
\ \"acc_norm_stderr\": 0.04077494709252627\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.03291099578615769,\n\
\ \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.03291099578615769\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n\
\ \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.41964285714285715,\n\
\ \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n\
\ \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n\
\ \"acc_stderr\": 0.021901905115073332,\n \"acc_norm\": 0.8717948717948718,\n\
\ \"acc_norm_stderr\": 0.021901905115073332\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \
\ \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8301404853128991,\n\
\ \"acc_stderr\": 0.013428186370608311,\n \"acc_norm\": 0.8301404853128991,\n\
\ \"acc_norm_stderr\": 0.013428186370608311\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.7138728323699421,\n \"acc_stderr\": 0.024332146779134128,\n\
\ \"acc_norm\": 0.7138728323699421,\n \"acc_norm_stderr\": 0.024332146779134128\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4770949720670391,\n\
\ \"acc_stderr\": 0.016704945740326188,\n \"acc_norm\": 0.4770949720670391,\n\
\ \"acc_norm_stderr\": 0.016704945740326188\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.738562091503268,\n \"acc_stderr\": 0.025160998214292456,\n\
\ \"acc_norm\": 0.738562091503268,\n \"acc_norm_stderr\": 0.025160998214292456\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7170418006430869,\n\
\ \"acc_stderr\": 0.025583062489984806,\n \"acc_norm\": 0.7170418006430869,\n\
\ \"acc_norm_stderr\": 0.025583062489984806\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.7438271604938271,\n \"acc_stderr\": 0.0242885336377261,\n\
\ \"acc_norm\": 0.7438271604938271,\n \"acc_norm_stderr\": 0.0242885336377261\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.4716312056737589,\n \"acc_stderr\": 0.029779450957303062,\n \
\ \"acc_norm\": 0.4716312056737589,\n \"acc_norm_stderr\": 0.029779450957303062\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4634941329856584,\n\
\ \"acc_stderr\": 0.012736153390214963,\n \"acc_norm\": 0.4634941329856584,\n\
\ \"acc_norm_stderr\": 0.012736153390214963\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.6838235294117647,\n \"acc_stderr\": 0.02824568739146293,\n\
\ \"acc_norm\": 0.6838235294117647,\n \"acc_norm_stderr\": 0.02824568739146293\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6633986928104575,\n \"acc_stderr\": 0.019117213911495148,\n \
\ \"acc_norm\": 0.6633986928104575,\n \"acc_norm_stderr\": 0.019117213911495148\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n\
\ \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n\
\ \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.7346938775510204,\n \"acc_stderr\": 0.028263889943784593,\n\
\ \"acc_norm\": 0.7346938775510204,\n \"acc_norm_stderr\": 0.028263889943784593\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n\
\ \"acc_stderr\": 0.02553843336857833,\n \"acc_norm\": 0.845771144278607,\n\
\ \"acc_norm_stderr\": 0.02553843336857833\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \
\ \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n\
\ \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n\
\ \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.5481927710843374,\n\
\ \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n\
\ \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5630354957160343,\n\
\ \"mc1_stderr\": 0.017363844503195957,\n \"mc2\": 0.6954134254414035,\n\
\ \"mc2_stderr\": 0.015047304382402624\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.8113654301499605,\n \"acc_stderr\": 0.010995172318019816\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6504927975739196,\n \
\ \"acc_stderr\": 0.013133836511705991\n }\n}\n```"
repo_url: https://huggingface.co/dillfrescott/trinity-medium
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|arc:challenge|25_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|gsm8k|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hellaswag|10_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-12-29T19-31-05.351110.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-management|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-virology|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|truthfulqa:mc|0_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-12-29T19-31-05.351110.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- '**/details_harness|winogrande|5_2023-12-29T19-31-05.351110.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-12-29T19-31-05.351110.parquet'
- config_name: results
data_files:
- split: 2023_12_29T19_31_05.351110
path:
- results_2023-12-29T19-31-05.351110.parquet
- split: latest
path:
- results_2023-12-29T19-31-05.351110.parquet
---
# Dataset Card for Evaluation run of dillfrescott/trinity-medium
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [dillfrescott/trinity-medium](https://huggingface.co/dillfrescott/trinity-medium) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_dillfrescott__trinity-medium",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-12-29T19:31:05.351110](https://huggingface.co/datasets/open-llm-leaderboard/details_dillfrescott__trinity-medium/blob/main/results_2023-12-29T19-31-05.351110.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6543513981322463,
"acc_stderr": 0.03200522597754596,
"acc_norm": 0.65517466197835,
"acc_norm_stderr": 0.03265417586520206,
"mc1": 0.5630354957160343,
"mc1_stderr": 0.017363844503195957,
"mc2": 0.6954134254414035,
"mc2_stderr": 0.015047304382402624
},
"harness|arc:challenge|25": {
"acc": 0.6868600682593856,
"acc_stderr": 0.013552671543623497,
"acc_norm": 0.7150170648464164,
"acc_norm_stderr": 0.013191348179838795
},
"harness|hellaswag|10": {
"acc": 0.6963752240589524,
"acc_stderr": 0.004588827958775116,
"acc_norm": 0.869946225851424,
"acc_norm_stderr": 0.0033567515689037672
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695236,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6666666666666666,
"acc_stderr": 0.04072314811876837,
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.04072314811876837
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6842105263157895,
"acc_stderr": 0.0378272898086547,
"acc_norm": 0.6842105263157895,
"acc_norm_stderr": 0.0378272898086547
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.63,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.63,
"acc_norm_stderr": 0.04852365870939099
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.7169811320754716,
"acc_stderr": 0.027724236492700918,
"acc_norm": 0.7169811320754716,
"acc_norm_stderr": 0.027724236492700918
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7708333333333334,
"acc_stderr": 0.03514697467862388,
"acc_norm": 0.7708333333333334,
"acc_norm_stderr": 0.03514697467862388
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.44,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.44,
"acc_norm_stderr": 0.04988876515698589
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.56,
"acc_stderr": 0.049888765156985884,
"acc_norm": 0.56,
"acc_norm_stderr": 0.049888765156985884
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.27,
"acc_stderr": 0.0446196043338474,
"acc_norm": 0.27,
"acc_norm_stderr": 0.0446196043338474
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6820809248554913,
"acc_stderr": 0.035506839891655796,
"acc_norm": 0.6820809248554913,
"acc_norm_stderr": 0.035506839891655796
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.46078431372549017,
"acc_stderr": 0.04959859966384181,
"acc_norm": 0.46078431372549017,
"acc_norm_stderr": 0.04959859966384181
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.77,
"acc_stderr": 0.04229525846816506,
"acc_norm": 0.77,
"acc_norm_stderr": 0.04229525846816506
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5659574468085107,
"acc_stderr": 0.03240038086792747,
"acc_norm": 0.5659574468085107,
"acc_norm_stderr": 0.03240038086792747
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5087719298245614,
"acc_stderr": 0.04702880432049615,
"acc_norm": 0.5087719298245614,
"acc_norm_stderr": 0.04702880432049615
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5793103448275863,
"acc_stderr": 0.0411391498118926,
"acc_norm": 0.5793103448275863,
"acc_norm_stderr": 0.0411391498118926
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.4312169312169312,
"acc_stderr": 0.02550648169813821,
"acc_norm": 0.4312169312169312,
"acc_norm_stderr": 0.02550648169813821
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.49206349206349204,
"acc_stderr": 0.044715725362943486,
"acc_norm": 0.49206349206349204,
"acc_norm_stderr": 0.044715725362943486
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.8,
"acc_stderr": 0.022755204959542946,
"acc_norm": 0.8,
"acc_norm_stderr": 0.022755204959542946
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.4975369458128079,
"acc_stderr": 0.03517945038691063,
"acc_norm": 0.4975369458128079,
"acc_norm_stderr": 0.03517945038691063
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.73,
"acc_stderr": 0.04461960433384739,
"acc_norm": 0.73,
"acc_norm_stderr": 0.04461960433384739
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7757575757575758,
"acc_stderr": 0.03256866661681102,
"acc_norm": 0.7757575757575758,
"acc_norm_stderr": 0.03256866661681102
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7929292929292929,
"acc_stderr": 0.028869778460267045,
"acc_norm": 0.7929292929292929,
"acc_norm_stderr": 0.028869778460267045
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8963730569948186,
"acc_stderr": 0.02199531196364424,
"acc_norm": 0.8963730569948186,
"acc_norm_stderr": 0.02199531196364424
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6692307692307692,
"acc_stderr": 0.023854795680971128,
"acc_norm": 0.6692307692307692,
"acc_norm_stderr": 0.023854795680971128
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.337037037037037,
"acc_stderr": 0.028820884666253255,
"acc_norm": 0.337037037037037,
"acc_norm_stderr": 0.028820884666253255
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6848739495798319,
"acc_stderr": 0.030176808288974337,
"acc_norm": 0.6848739495798319,
"acc_norm_stderr": 0.030176808288974337
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.3576158940397351,
"acc_stderr": 0.03913453431177258,
"acc_norm": 0.3576158940397351,
"acc_norm_stderr": 0.03913453431177258
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8422018348623853,
"acc_stderr": 0.01563002297009244,
"acc_norm": 0.8422018348623853,
"acc_norm_stderr": 0.01563002297009244
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5370370370370371,
"acc_stderr": 0.03400603625538272,
"acc_norm": 0.5370370370370371,
"acc_norm_stderr": 0.03400603625538272
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8284313725490197,
"acc_stderr": 0.026460569561240644,
"acc_norm": 0.8284313725490197,
"acc_norm_stderr": 0.026460569561240644
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7974683544303798,
"acc_stderr": 0.02616056824660146,
"acc_norm": 0.7974683544303798,
"acc_norm_stderr": 0.02616056824660146
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6816143497757847,
"acc_stderr": 0.03126580522513713,
"acc_norm": 0.6816143497757847,
"acc_norm_stderr": 0.03126580522513713
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.8091603053435115,
"acc_stderr": 0.03446513350752599,
"acc_norm": 0.8091603053435115,
"acc_norm_stderr": 0.03446513350752599
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7933884297520661,
"acc_stderr": 0.03695980128098824,
"acc_norm": 0.7933884297520661,
"acc_norm_stderr": 0.03695980128098824
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7685185185185185,
"acc_stderr": 0.04077494709252627,
"acc_norm": 0.7685185185185185,
"acc_norm_stderr": 0.04077494709252627
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7730061349693251,
"acc_stderr": 0.03291099578615769,
"acc_norm": 0.7730061349693251,
"acc_norm_stderr": 0.03291099578615769
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.41964285714285715,
"acc_stderr": 0.04684099321077106,
"acc_norm": 0.41964285714285715,
"acc_norm_stderr": 0.04684099321077106
},
"harness|hendrycksTest-management|5": {
"acc": 0.7864077669902912,
"acc_stderr": 0.040580420156460344,
"acc_norm": 0.7864077669902912,
"acc_norm_stderr": 0.040580420156460344
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8717948717948718,
"acc_stderr": 0.021901905115073332,
"acc_norm": 0.8717948717948718,
"acc_norm_stderr": 0.021901905115073332
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.71,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.71,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8301404853128991,
"acc_stderr": 0.013428186370608311,
"acc_norm": 0.8301404853128991,
"acc_norm_stderr": 0.013428186370608311
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7138728323699421,
"acc_stderr": 0.024332146779134128,
"acc_norm": 0.7138728323699421,
"acc_norm_stderr": 0.024332146779134128
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.4770949720670391,
"acc_stderr": 0.016704945740326188,
"acc_norm": 0.4770949720670391,
"acc_norm_stderr": 0.016704945740326188
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.738562091503268,
"acc_stderr": 0.025160998214292456,
"acc_norm": 0.738562091503268,
"acc_norm_stderr": 0.025160998214292456
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7170418006430869,
"acc_stderr": 0.025583062489984806,
"acc_norm": 0.7170418006430869,
"acc_norm_stderr": 0.025583062489984806
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7438271604938271,
"acc_stderr": 0.0242885336377261,
"acc_norm": 0.7438271604938271,
"acc_norm_stderr": 0.0242885336377261
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4716312056737589,
"acc_stderr": 0.029779450957303062,
"acc_norm": 0.4716312056737589,
"acc_norm_stderr": 0.029779450957303062
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4634941329856584,
"acc_stderr": 0.012736153390214963,
"acc_norm": 0.4634941329856584,
"acc_norm_stderr": 0.012736153390214963
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6838235294117647,
"acc_stderr": 0.02824568739146293,
"acc_norm": 0.6838235294117647,
"acc_norm_stderr": 0.02824568739146293
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6633986928104575,
"acc_stderr": 0.019117213911495148,
"acc_norm": 0.6633986928104575,
"acc_norm_stderr": 0.019117213911495148
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6636363636363637,
"acc_stderr": 0.04525393596302506,
"acc_norm": 0.6636363636363637,
"acc_norm_stderr": 0.04525393596302506
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7346938775510204,
"acc_stderr": 0.028263889943784593,
"acc_norm": 0.7346938775510204,
"acc_norm_stderr": 0.028263889943784593
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.845771144278607,
"acc_stderr": 0.02553843336857833,
"acc_norm": 0.845771144278607,
"acc_norm_stderr": 0.02553843336857833
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.86,
"acc_stderr": 0.0348735088019777,
"acc_norm": 0.86,
"acc_norm_stderr": 0.0348735088019777
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5481927710843374,
"acc_stderr": 0.03874371556587953,
"acc_norm": 0.5481927710843374,
"acc_norm_stderr": 0.03874371556587953
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8362573099415205,
"acc_stderr": 0.028380919596145866,
"acc_norm": 0.8362573099415205,
"acc_norm_stderr": 0.028380919596145866
},
"harness|truthfulqa:mc|0": {
"mc1": 0.5630354957160343,
"mc1_stderr": 0.017363844503195957,
"mc2": 0.6954134254414035,
"mc2_stderr": 0.015047304382402624
},
"harness|winogrande|5": {
"acc": 0.8113654301499605,
"acc_stderr": 0.010995172318019816
},
"harness|gsm8k|5": {
"acc": 0.6504927975739196,
"acc_stderr": 0.013133836511705991
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
SonicXtreme99/MikeySimon_Finished | ---
license: openrail
---
|
xdmizo/dsdsd | ---
license: wtfpl
---
|
codys12/MergeLlama | ---
license: cc-by-4.0
---
MergeLlama is a unique dataset that encapsulates real-world merge conflicts alongside their corresponding resolutions. Developed from the foundational dataset shared in "Anonymous. (2022). Data set for FSE 2022 Submission Program Merge Conflict Resolution via Neural Transformers", MergeLlama provides a comprehensive collection of conflict scenarios and how they were resolved. With potential multiple conflicts in a single entry followed by its respective resolution, this dataset serves as a rich resource for understanding merge conflicts and developing automated resolution strategies.
For those using this dataset, please cite as follows:
"MergeLlama Dataset. (2023). Merge Conflicts Fused with Their Resolutions. Based on: Anonymous. (2022). Data set for FSE 2022 Submission Program Merge Conflict Resolution via Neural Transformers (1.0) [Data set]. Zenodo. https://doi.org/10.5281/zenodo.6366908".
|
CyberHarem/fukuda_noriko_theidolmstermillionlive | ---
license: mit
task_categories:
- text-to-image
tags:
- art
- not-for-all-audiences
size_categories:
- n<1K
---
# Dataset of fukuda_noriko/็ฆ็ฐใฎใๅญ (THE iDOLM@STER: Million Live!)
This is the dataset of fukuda_noriko/็ฆ็ฐใฎใๅญ (THE iDOLM@STER: Million Live!), containing 156 images and their tags.
The core tags of this character are `short_hair, blonde_hair, brown_eyes, breasts, bangs, earrings`, which are pruned in this dataset.
Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)).
## List of Packages
| Name | Images | Size | Download | Type | Description |
|:-----------------|---------:|:-----------|:---------------------------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------|
| raw | 156 | 137.64 MiB | [Download](https://huggingface.co/datasets/CyberHarem/fukuda_noriko_theidolmstermillionlive/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). |
| 800 | 156 | 99.75 MiB | [Download](https://huggingface.co/datasets/CyberHarem/fukuda_noriko_theidolmstermillionlive/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. |
| stage3-p480-800 | 361 | 199.20 MiB | [Download](https://huggingface.co/datasets/CyberHarem/fukuda_noriko_theidolmstermillionlive/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
| 1200 | 156 | 128.09 MiB | [Download](https://huggingface.co/datasets/CyberHarem/fukuda_noriko_theidolmstermillionlive/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. |
| stage3-p480-1200 | 361 | 245.86 MiB | [Download](https://huggingface.co/datasets/CyberHarem/fukuda_noriko_theidolmstermillionlive/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
### Load Raw Dataset with Waifuc
We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code
```python
import os
import zipfile
from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource
# download raw archive file
zip_file = hf_hub_download(
repo_id='CyberHarem/fukuda_noriko_theidolmstermillionlive',
repo_type='dataset',
filename='dataset-raw.zip',
)
# extract files to your directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
zf.extractall(dataset_dir)
# load the dataset with waifuc
source = LocalSource(dataset_dir)
for item in source:
print(item.image, item.meta['filename'], item.meta['tags'])
```
## List of Clusters
List of tag clustering result, maybe some outfits can be mined here.
### Raw Text Version
| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags |
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| 0 | 8 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, blush, nipples, open_mouth, sweat, 1boy, hetero, penis, pussy, solo_focus, female_pubic_hair, large_breasts, navel, sex, vaginal, medium_breasts, bar_censor, collarbone, completely_nude, cum, one_eye_closed, spread_legs |
| 1 | 10 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, white_background, blush, looking_at_viewer, simple_background, solo, blunt_bangs, collarbone, long_sleeves, star_earrings, upper_body, white_shirt, :d, black_jacket, leather_jacket, open_mouth |
| 2 | 6 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, looking_at_viewer, solo, cleavage, navel, blush, collarbone, simple_background, white_background, blue_bikini, blunt_bangs, large_breasts, medium_breasts, one_eye_closed, open_mouth, smile |
| 3 | 12 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, smile, looking_at_viewer, open_mouth, solo, one_eye_closed, skirt, ;d, blush, gloves, jewelry, navel, microphone, midriff |
### Table Version
| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | blush | nipples | open_mouth | sweat | 1boy | hetero | penis | pussy | solo_focus | female_pubic_hair | large_breasts | navel | sex | vaginal | medium_breasts | bar_censor | collarbone | completely_nude | cum | one_eye_closed | spread_legs | white_background | looking_at_viewer | simple_background | solo | blunt_bangs | long_sleeves | star_earrings | upper_body | white_shirt | :d | black_jacket | leather_jacket | cleavage | blue_bikini | smile | skirt | ;d | gloves | jewelry | microphone | midriff |
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:--------|:----------|:-------------|:--------|:-------|:---------|:--------|:--------|:-------------|:--------------------|:----------------|:--------|:------|:----------|:-----------------|:-------------|:-------------|:------------------|:------|:-----------------|:--------------|:-------------------|:--------------------|:--------------------|:-------|:--------------|:---------------|:----------------|:-------------|:--------------|:-----|:---------------|:-----------------|:-----------|:--------------|:--------|:--------|:-----|:---------|:----------|:-------------|:----------|
| 0 | 8 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | |
| 1 | 10 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | | X | | | | | | | | | | | | | | X | | | | | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | |
| 2 | 6 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | X | | X | | | | | | | | X | X | | | X | | X | | | X | | X | X | X | X | X | | | | | | | | X | X | X | | | | | | |
| 3 | 12 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | X | | X | | | | | | | | | X | | | | | | | | X | | | X | | X | | | | | | | | | | | X | X | X | X | X | X | X |
|
open-llm-leaderboard/details_TeeZee__2xbagel-dpo-34b-v0.2 | ---
pretty_name: Evaluation run of TeeZee/2xbagel-dpo-34b-v0.2
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [TeeZee/2xbagel-dpo-34b-v0.2](https://huggingface.co/TeeZee/2xbagel-dpo-34b-v0.2)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TeeZee__2xbagel-dpo-34b-v0.2\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2024-01-13T23:15:59.619735](https://huggingface.co/datasets/open-llm-leaderboard/details_TeeZee__2xbagel-dpo-34b-v0.2/blob/main/results_2024-01-13T23-15-59.619735.json)(note\
\ that there might be results for other tasks in the repos if successive evals didn't
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7214725397684685,\n\
\ \"acc_stderr\": 0.029456464928054458,\n \"acc_norm\": 0.7359963920471002,\n\
\ \"acc_norm_stderr\": 0.030168902390549673,\n \"mc1\": 0.5018359853121175,\n\
\ \"mc1_stderr\": 0.017503383046877048,\n \"mc2\": 0.6715187545754473,\n\
\ \"mc2_stderr\": 0.015523811623029661\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.6356655290102389,\n \"acc_stderr\": 0.014063260279882417,\n\
\ \"acc_norm\": 0.6527303754266212,\n \"acc_norm_stderr\": 0.013913034529620458\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6113324039036049,\n\
\ \"acc_stderr\": 0.004864513262194309,\n \"acc_norm\": 0.7934674367655845,\n\
\ \"acc_norm_stderr\": 0.004039897423689437\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \
\ \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.674074074074074,\n\
\ \"acc_stderr\": 0.040491220417025055,\n \"acc_norm\": 0.674074074074074,\n\
\ \"acc_norm_stderr\": 0.040491220417025055\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.8289473684210527,\n \"acc_stderr\": 0.030643607071677084,\n\
\ \"acc_norm\": 0.8289473684210527,\n \"acc_norm_stderr\": 0.030643607071677084\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.8,\n\
\ \"acc_stderr\": 0.04020151261036843,\n \"acc_norm\": 0.8,\n \
\ \"acc_norm_stderr\": 0.04020151261036843\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.7735849056603774,\n \"acc_stderr\": 0.025757559893106737,\n\
\ \"acc_norm\": 0.7735849056603774,\n \"acc_norm_stderr\": 0.025757559893106737\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8819444444444444,\n\
\ \"acc_stderr\": 0.02698334650330939,\n \"acc_norm\": 0.8819444444444444,\n\
\ \"acc_norm_stderr\": 0.02698334650330939\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.56,\n \"acc_stderr\": 0.049888765156985884,\n \
\ \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.049888765156985884\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\
acc\": 0.67,\n \"acc_stderr\": 0.04725815626252607,\n \"acc_norm\"\
: 0.67,\n \"acc_norm_stderr\": 0.04725815626252607\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \
\ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6994219653179191,\n\
\ \"acc_stderr\": 0.0349610148119118,\n \"acc_norm\": 0.6994219653179191,\n\
\ \"acc_norm_stderr\": 0.0349610148119118\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.49019607843137253,\n \"acc_stderr\": 0.04974229460422817,\n\
\ \"acc_norm\": 0.49019607843137253,\n \"acc_norm_stderr\": 0.04974229460422817\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.81,\n \"acc_stderr\": 0.039427724440366234,\n \"acc_norm\": 0.81,\n\
\ \"acc_norm_stderr\": 0.039427724440366234\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.7531914893617021,\n \"acc_stderr\": 0.02818544130123409,\n\
\ \"acc_norm\": 0.7531914893617021,\n \"acc_norm_stderr\": 0.02818544130123409\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5350877192982456,\n\
\ \"acc_stderr\": 0.046920083813689104,\n \"acc_norm\": 0.5350877192982456,\n\
\ \"acc_norm_stderr\": 0.046920083813689104\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.6689655172413793,\n \"acc_stderr\": 0.03921545312467122,\n\
\ \"acc_norm\": 0.6689655172413793,\n \"acc_norm_stderr\": 0.03921545312467122\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.6507936507936508,\n \"acc_stderr\": 0.02455229220934266,\n \"\
acc_norm\": 0.6507936507936508,\n \"acc_norm_stderr\": 0.02455229220934266\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5476190476190477,\n\
\ \"acc_stderr\": 0.044518079590553275,\n \"acc_norm\": 0.5476190476190477,\n\
\ \"acc_norm_stderr\": 0.044518079590553275\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \
\ \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\
: 0.8935483870967742,\n \"acc_stderr\": 0.017545102951656635,\n \"\
acc_norm\": 0.8935483870967742,\n \"acc_norm_stderr\": 0.017545102951656635\n\
\ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\
: 0.5960591133004927,\n \"acc_stderr\": 0.03452453903822032,\n \"\
acc_norm\": 0.5960591133004927,\n \"acc_norm_stderr\": 0.03452453903822032\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.78,\n \"acc_stderr\": 0.041633319989322626,\n \"acc_norm\"\
: 0.78,\n \"acc_norm_stderr\": 0.041633319989322626\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.8303030303030303,\n \"acc_stderr\": 0.02931118867498311,\n\
\ \"acc_norm\": 0.8303030303030303,\n \"acc_norm_stderr\": 0.02931118867498311\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.9090909090909091,\n \"acc_stderr\": 0.02048208677542421,\n \"\
acc_norm\": 0.9090909090909091,\n \"acc_norm_stderr\": 0.02048208677542421\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.9585492227979274,\n \"acc_stderr\": 0.014385432857476453,\n\
\ \"acc_norm\": 0.9585492227979274,\n \"acc_norm_stderr\": 0.014385432857476453\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.7769230769230769,\n \"acc_stderr\": 0.02110773012724399,\n \
\ \"acc_norm\": 0.7769230769230769,\n \"acc_norm_stderr\": 0.02110773012724399\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.37037037037037035,\n \"acc_stderr\": 0.02944316932303154,\n \
\ \"acc_norm\": 0.37037037037037035,\n \"acc_norm_stderr\": 0.02944316932303154\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.8235294117647058,\n \"acc_stderr\": 0.024762902678057943,\n\
\ \"acc_norm\": 0.8235294117647058,\n \"acc_norm_stderr\": 0.024762902678057943\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.4304635761589404,\n \"acc_stderr\": 0.04042809961395634,\n \"\
acc_norm\": 0.4304635761589404,\n \"acc_norm_stderr\": 0.04042809961395634\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.9174311926605505,\n \"acc_stderr\": 0.011800361363016567,\n \"\
acc_norm\": 0.9174311926605505,\n \"acc_norm_stderr\": 0.011800361363016567\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.6620370370370371,\n \"acc_stderr\": 0.03225941352631295,\n \"\
acc_norm\": 0.6620370370370371,\n \"acc_norm_stderr\": 0.03225941352631295\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.8970588235294118,\n \"acc_stderr\": 0.021328337570804365,\n \"\
acc_norm\": 0.8970588235294118,\n \"acc_norm_stderr\": 0.021328337570804365\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.8481012658227848,\n \"acc_stderr\": 0.023363878096632446,\n \
\ \"acc_norm\": 0.8481012658227848,\n \"acc_norm_stderr\": 0.023363878096632446\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7757847533632287,\n\
\ \"acc_stderr\": 0.02799153425851952,\n \"acc_norm\": 0.7757847533632287,\n\
\ \"acc_norm_stderr\": 0.02799153425851952\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.8549618320610687,\n \"acc_stderr\": 0.030884661089515375,\n\
\ \"acc_norm\": 0.8549618320610687,\n \"acc_norm_stderr\": 0.030884661089515375\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.8512396694214877,\n \"acc_stderr\": 0.03248470083807193,\n \"\
acc_norm\": 0.8512396694214877,\n \"acc_norm_stderr\": 0.03248470083807193\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8240740740740741,\n\
\ \"acc_stderr\": 0.03680918141673883,\n \"acc_norm\": 0.8240740740740741,\n\
\ \"acc_norm_stderr\": 0.03680918141673883\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.8404907975460123,\n \"acc_stderr\": 0.02876748172598387,\n\
\ \"acc_norm\": 0.8404907975460123,\n \"acc_norm_stderr\": 0.02876748172598387\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5714285714285714,\n\
\ \"acc_stderr\": 0.04697113923010213,\n \"acc_norm\": 0.5714285714285714,\n\
\ \"acc_norm_stderr\": 0.04697113923010213\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.8543689320388349,\n \"acc_stderr\": 0.03492606476623791,\n\
\ \"acc_norm\": 0.8543689320388349,\n \"acc_norm_stderr\": 0.03492606476623791\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9145299145299145,\n\
\ \"acc_stderr\": 0.018315891685625845,\n \"acc_norm\": 0.9145299145299145,\n\
\ \"acc_norm_stderr\": 0.018315891685625845\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.81,\n \"acc_stderr\": 0.039427724440366234,\n \
\ \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.039427724440366234\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.89272030651341,\n\
\ \"acc_stderr\": 0.011066571449508435,\n \"acc_norm\": 0.89272030651341,\n\
\ \"acc_norm_stderr\": 0.011066571449508435\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.7976878612716763,\n \"acc_stderr\": 0.021628077380196124,\n\
\ \"acc_norm\": 0.7976878612716763,\n \"acc_norm_stderr\": 0.021628077380196124\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.729608938547486,\n\
\ \"acc_stderr\": 0.014854993938010081,\n \"acc_norm\": 0.729608938547486,\n\
\ \"acc_norm_stderr\": 0.014854993938010081\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.8071895424836601,\n \"acc_stderr\": 0.02258931888817668,\n\
\ \"acc_norm\": 0.8071895424836601,\n \"acc_norm_stderr\": 0.02258931888817668\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.8070739549839229,\n\
\ \"acc_stderr\": 0.022411516780911366,\n \"acc_norm\": 0.8070739549839229,\n\
\ \"acc_norm_stderr\": 0.022411516780911366\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.8179012345679012,\n \"acc_stderr\": 0.02147349183480833,\n\
\ \"acc_norm\": 0.8179012345679012,\n \"acc_norm_stderr\": 0.02147349183480833\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.6382978723404256,\n \"acc_stderr\": 0.02866382014719949,\n \
\ \"acc_norm\": 0.6382978723404256,\n \"acc_norm_stderr\": 0.02866382014719949\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5501955671447197,\n\
\ \"acc_stderr\": 0.012705721498564972,\n \"acc_norm\": 0.5501955671447197,\n\
\ \"acc_norm_stderr\": 0.012705721498564972\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.7867647058823529,\n \"acc_stderr\": 0.024880971512294243,\n\
\ \"acc_norm\": 0.7867647058823529,\n \"acc_norm_stderr\": 0.024880971512294243\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.7908496732026143,\n \"acc_stderr\": 0.016453399332279326,\n \
\ \"acc_norm\": 0.7908496732026143,\n \"acc_norm_stderr\": 0.016453399332279326\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7090909090909091,\n\
\ \"acc_stderr\": 0.04350271442923243,\n \"acc_norm\": 0.7090909090909091,\n\
\ \"acc_norm_stderr\": 0.04350271442923243\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.7836734693877551,\n \"acc_stderr\": 0.026358916334904045,\n\
\ \"acc_norm\": 0.7836734693877551,\n \"acc_norm_stderr\": 0.026358916334904045\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8905472636815921,\n\
\ \"acc_stderr\": 0.022076326101824664,\n \"acc_norm\": 0.8905472636815921,\n\
\ \"acc_norm_stderr\": 0.022076326101824664\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.88,\n \"acc_stderr\": 0.032659863237109066,\n \
\ \"acc_norm\": 0.88,\n \"acc_norm_stderr\": 0.032659863237109066\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5421686746987951,\n\
\ \"acc_stderr\": 0.0387862677100236,\n \"acc_norm\": 0.5421686746987951,\n\
\ \"acc_norm_stderr\": 0.0387862677100236\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8596491228070176,\n \"acc_stderr\": 0.026640582539133196,\n\
\ \"acc_norm\": 0.8596491228070176,\n \"acc_norm_stderr\": 0.026640582539133196\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5018359853121175,\n\
\ \"mc1_stderr\": 0.017503383046877048,\n \"mc2\": 0.6715187545754473,\n\
\ \"mc2_stderr\": 0.015523811623029661\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7640094711917916,\n \"acc_stderr\": 0.011933828850275626\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.02122820318423048,\n \
\ \"acc_stderr\": 0.003970449129848635\n }\n}\n```"
repo_url: https://huggingface.co/TeeZee/2xbagel-dpo-34b-v0.2
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|arc:challenge|25_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|gsm8k|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hellaswag|10_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-01-13T23-15-59.619735.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-management|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-virology|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|truthfulqa:mc|0_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-01-13T23-15-59.619735.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- '**/details_harness|winogrande|5_2024-01-13T23-15-59.619735.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-01-13T23-15-59.619735.parquet'
- config_name: results
data_files:
- split: 2024_01_13T23_15_59.619735
path:
- results_2024-01-13T23-15-59.619735.parquet
- split: latest
path:
- results_2024-01-13T23-15-59.619735.parquet
---
# Dataset Card for Evaluation run of TeeZee/2xbagel-dpo-34b-v0.2
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [TeeZee/2xbagel-dpo-34b-v0.2](https://huggingface.co/TeeZee/2xbagel-dpo-34b-v0.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_TeeZee__2xbagel-dpo-34b-v0.2",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2024-01-13T23:15:59.619735](https://huggingface.co/datasets/open-llm-leaderboard/details_TeeZee__2xbagel-dpo-34b-v0.2/blob/main/results_2024-01-13T23-15-59.619735.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.7214725397684685,
"acc_stderr": 0.029456464928054458,
"acc_norm": 0.7359963920471002,
"acc_norm_stderr": 0.030168902390549673,
"mc1": 0.5018359853121175,
"mc1_stderr": 0.017503383046877048,
"mc2": 0.6715187545754473,
"mc2_stderr": 0.015523811623029661
},
"harness|arc:challenge|25": {
"acc": 0.6356655290102389,
"acc_stderr": 0.014063260279882417,
"acc_norm": 0.6527303754266212,
"acc_norm_stderr": 0.013913034529620458
},
"harness|hellaswag|10": {
"acc": 0.6113324039036049,
"acc_stderr": 0.004864513262194309,
"acc_norm": 0.7934674367655845,
"acc_norm_stderr": 0.004039897423689437
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.674074074074074,
"acc_stderr": 0.040491220417025055,
"acc_norm": 0.674074074074074,
"acc_norm_stderr": 0.040491220417025055
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.8289473684210527,
"acc_stderr": 0.030643607071677084,
"acc_norm": 0.8289473684210527,
"acc_norm_stderr": 0.030643607071677084
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.8,
"acc_stderr": 0.04020151261036843,
"acc_norm": 0.8,
"acc_norm_stderr": 0.04020151261036843
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.7735849056603774,
"acc_stderr": 0.025757559893106737,
"acc_norm": 0.7735849056603774,
"acc_norm_stderr": 0.025757559893106737
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.8819444444444444,
"acc_stderr": 0.02698334650330939,
"acc_norm": 0.8819444444444444,
"acc_norm_stderr": 0.02698334650330939
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.56,
"acc_stderr": 0.049888765156985884,
"acc_norm": 0.56,
"acc_norm_stderr": 0.049888765156985884
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.67,
"acc_stderr": 0.04725815626252607,
"acc_norm": 0.67,
"acc_norm_stderr": 0.04725815626252607
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6994219653179191,
"acc_stderr": 0.0349610148119118,
"acc_norm": 0.6994219653179191,
"acc_norm_stderr": 0.0349610148119118
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.49019607843137253,
"acc_stderr": 0.04974229460422817,
"acc_norm": 0.49019607843137253,
"acc_norm_stderr": 0.04974229460422817
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.81,
"acc_stderr": 0.039427724440366234,
"acc_norm": 0.81,
"acc_norm_stderr": 0.039427724440366234
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.7531914893617021,
"acc_stderr": 0.02818544130123409,
"acc_norm": 0.7531914893617021,
"acc_norm_stderr": 0.02818544130123409
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5350877192982456,
"acc_stderr": 0.046920083813689104,
"acc_norm": 0.5350877192982456,
"acc_norm_stderr": 0.046920083813689104
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.6689655172413793,
"acc_stderr": 0.03921545312467122,
"acc_norm": 0.6689655172413793,
"acc_norm_stderr": 0.03921545312467122
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.6507936507936508,
"acc_stderr": 0.02455229220934266,
"acc_norm": 0.6507936507936508,
"acc_norm_stderr": 0.02455229220934266
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.5476190476190477,
"acc_stderr": 0.044518079590553275,
"acc_norm": 0.5476190476190477,
"acc_norm_stderr": 0.044518079590553275
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.58,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.58,
"acc_norm_stderr": 0.049604496374885836
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.8935483870967742,
"acc_stderr": 0.017545102951656635,
"acc_norm": 0.8935483870967742,
"acc_norm_stderr": 0.017545102951656635
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5960591133004927,
"acc_stderr": 0.03452453903822032,
"acc_norm": 0.5960591133004927,
"acc_norm_stderr": 0.03452453903822032
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.78,
"acc_stderr": 0.041633319989322626,
"acc_norm": 0.78,
"acc_norm_stderr": 0.041633319989322626
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.8303030303030303,
"acc_stderr": 0.02931118867498311,
"acc_norm": 0.8303030303030303,
"acc_norm_stderr": 0.02931118867498311
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.9090909090909091,
"acc_stderr": 0.02048208677542421,
"acc_norm": 0.9090909090909091,
"acc_norm_stderr": 0.02048208677542421
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9585492227979274,
"acc_stderr": 0.014385432857476453,
"acc_norm": 0.9585492227979274,
"acc_norm_stderr": 0.014385432857476453
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.7769230769230769,
"acc_stderr": 0.02110773012724399,
"acc_norm": 0.7769230769230769,
"acc_norm_stderr": 0.02110773012724399
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.37037037037037035,
"acc_stderr": 0.02944316932303154,
"acc_norm": 0.37037037037037035,
"acc_norm_stderr": 0.02944316932303154
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.8235294117647058,
"acc_stderr": 0.024762902678057943,
"acc_norm": 0.8235294117647058,
"acc_norm_stderr": 0.024762902678057943
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.4304635761589404,
"acc_stderr": 0.04042809961395634,
"acc_norm": 0.4304635761589404,
"acc_norm_stderr": 0.04042809961395634
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.9174311926605505,
"acc_stderr": 0.011800361363016567,
"acc_norm": 0.9174311926605505,
"acc_norm_stderr": 0.011800361363016567
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.6620370370370371,
"acc_stderr": 0.03225941352631295,
"acc_norm": 0.6620370370370371,
"acc_norm_stderr": 0.03225941352631295
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8970588235294118,
"acc_stderr": 0.021328337570804365,
"acc_norm": 0.8970588235294118,
"acc_norm_stderr": 0.021328337570804365
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.8481012658227848,
"acc_stderr": 0.023363878096632446,
"acc_norm": 0.8481012658227848,
"acc_norm_stderr": 0.023363878096632446
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.7757847533632287,
"acc_stderr": 0.02799153425851952,
"acc_norm": 0.7757847533632287,
"acc_norm_stderr": 0.02799153425851952
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.8549618320610687,
"acc_stderr": 0.030884661089515375,
"acc_norm": 0.8549618320610687,
"acc_norm_stderr": 0.030884661089515375
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.8512396694214877,
"acc_stderr": 0.03248470083807193,
"acc_norm": 0.8512396694214877,
"acc_norm_stderr": 0.03248470083807193
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8240740740740741,
"acc_stderr": 0.03680918141673883,
"acc_norm": 0.8240740740740741,
"acc_norm_stderr": 0.03680918141673883
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.8404907975460123,
"acc_stderr": 0.02876748172598387,
"acc_norm": 0.8404907975460123,
"acc_norm_stderr": 0.02876748172598387
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.5714285714285714,
"acc_stderr": 0.04697113923010213,
"acc_norm": 0.5714285714285714,
"acc_norm_stderr": 0.04697113923010213
},
"harness|hendrycksTest-management|5": {
"acc": 0.8543689320388349,
"acc_stderr": 0.03492606476623791,
"acc_norm": 0.8543689320388349,
"acc_norm_stderr": 0.03492606476623791
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.9145299145299145,
"acc_stderr": 0.018315891685625845,
"acc_norm": 0.9145299145299145,
"acc_norm_stderr": 0.018315891685625845
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.81,
"acc_stderr": 0.039427724440366234,
"acc_norm": 0.81,
"acc_norm_stderr": 0.039427724440366234
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.89272030651341,
"acc_stderr": 0.011066571449508435,
"acc_norm": 0.89272030651341,
"acc_norm_stderr": 0.011066571449508435
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7976878612716763,
"acc_stderr": 0.021628077380196124,
"acc_norm": 0.7976878612716763,
"acc_norm_stderr": 0.021628077380196124
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.729608938547486,
"acc_stderr": 0.014854993938010081,
"acc_norm": 0.729608938547486,
"acc_norm_stderr": 0.014854993938010081
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.8071895424836601,
"acc_stderr": 0.02258931888817668,
"acc_norm": 0.8071895424836601,
"acc_norm_stderr": 0.02258931888817668
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.8070739549839229,
"acc_stderr": 0.022411516780911366,
"acc_norm": 0.8070739549839229,
"acc_norm_stderr": 0.022411516780911366
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.8179012345679012,
"acc_stderr": 0.02147349183480833,
"acc_norm": 0.8179012345679012,
"acc_norm_stderr": 0.02147349183480833
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.6382978723404256,
"acc_stderr": 0.02866382014719949,
"acc_norm": 0.6382978723404256,
"acc_norm_stderr": 0.02866382014719949
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.5501955671447197,
"acc_stderr": 0.012705721498564972,
"acc_norm": 0.5501955671447197,
"acc_norm_stderr": 0.012705721498564972
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.7867647058823529,
"acc_stderr": 0.024880971512294243,
"acc_norm": 0.7867647058823529,
"acc_norm_stderr": 0.024880971512294243
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.7908496732026143,
"acc_stderr": 0.016453399332279326,
"acc_norm": 0.7908496732026143,
"acc_norm_stderr": 0.016453399332279326
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.7090909090909091,
"acc_stderr": 0.04350271442923243,
"acc_norm": 0.7090909090909091,
"acc_norm_stderr": 0.04350271442923243
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7836734693877551,
"acc_stderr": 0.026358916334904045,
"acc_norm": 0.7836734693877551,
"acc_norm_stderr": 0.026358916334904045
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8905472636815921,
"acc_stderr": 0.022076326101824664,
"acc_norm": 0.8905472636815921,
"acc_norm_stderr": 0.022076326101824664
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.88,
"acc_stderr": 0.032659863237109066,
"acc_norm": 0.88,
"acc_norm_stderr": 0.032659863237109066
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5421686746987951,
"acc_stderr": 0.0387862677100236,
"acc_norm": 0.5421686746987951,
"acc_norm_stderr": 0.0387862677100236
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8596491228070176,
"acc_stderr": 0.026640582539133196,
"acc_norm": 0.8596491228070176,
"acc_norm_stderr": 0.026640582539133196
},
"harness|truthfulqa:mc|0": {
"mc1": 0.5018359853121175,
"mc1_stderr": 0.017503383046877048,
"mc2": 0.6715187545754473,
"mc2_stderr": 0.015523811623029661
},
"harness|winogrande|5": {
"acc": 0.7640094711917916,
"acc_stderr": 0.011933828850275626
},
"harness|gsm8k|5": {
"acc": 0.02122820318423048,
"acc_stderr": 0.003970449129848635
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
liuyanchen1015/MULTI_VALUE_wnli_generalized_third_person_s | ---
dataset_info:
features:
- name: sentence1
dtype: string
- name: sentence2
dtype: string
- name: label
dtype: int64
- name: idx
dtype: int64
- name: value_score
dtype: int64
splits:
- name: dev
num_bytes: 172
num_examples: 1
- name: test
num_bytes: 374
num_examples: 2
- name: train
num_bytes: 3923
num_examples: 21
download_size: 10184
dataset_size: 4469
---
# Dataset Card for "MULTI_VALUE_wnli_generalized_third_person_s"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
flaviagiammarino/path-vqa | ---
license: mit
task_categories:
- visual-question-answering
language:
- en
tags:
- medical
pretty_name: PathVQA
paperswithcode_id: pathvqa
size_categories:
- 10K<n<100K
dataset_info:
features:
- name: image
dtype: image
- name: question
dtype: string
- name: answer
dtype: string
splits:
- name: train
num_bytes: 3171303616.326
num_examples: 19654
- name: test
num_bytes: 1113474813.05
num_examples: 6719
- name: validation
num_bytes: 1191658832.096
num_examples: 6259
download_size: 785414952
dataset_size: 5476437261.472
---
# Dataset Card for PathVQA
## Dataset Description
PathVQA is a dataset of question-answer pairs on pathology images. The dataset is intended to be used for training and testing
Medical Visual Question Answering (VQA) systems. The dataset includes both open-ended questions and binary "yes/no" questions.
The dataset is built from two publicly-available pathology textbooks: "Textbook of Pathology" and "Basic Pathology", and a
publicly-available digital library: "Pathology Education Informational Resource" (PEIR). The copyrights of images and captions
belong to the publishers and authors of these two books, and the owners of the PEIR digital library.<br>
**Repository:** [PathVQA Official GitHub Repository](https://github.com/UCSD-AI4H/PathVQA)<br>
**Paper:** [PathVQA: 30000+ Questions for Medical Visual Question Answering](https://arxiv.org/abs/2003.10286)<br>
**Leaderboard:** [Papers with Code Leaderboard](https://paperswithcode.com/sota/medical-visual-question-answering-on-pathvqa)
### Dataset Summary
The dataset was obtained from the updated Google Drive link shared by the authors on Feb 15, 2023,
see the [commit](https://github.com/UCSD-AI4H/PathVQA/commit/117e7f4ef88a0e65b0e7f37b98a73d6237a3ceab)
in the GitHub repository. This version of the dataset contains a total of 5,004 images and 32,795 question-answer pairs.
Out of the 5,004 images, 4,289 images are referenced by a question-answer pair, while 715 images are not used.
There are a few image-question-answer triplets which occur more than once in the same split (training, validation, test).
After dropping the duplicate image-question-answer triplets, the dataset contains 32,632 question-answer pairs on 4,289 images.
#### Supported Tasks and Leaderboards
The PathVQA dataset has an active leaderboard on [Papers with Code](https://paperswithcode.com/sota/medical-visual-question-answering-on-pathvqa)
where models are ranked based on three metrics: "Yes/No Accuracy", "Free-form accuracy" and "Overall accuracy". "Yes/No Accuracy" is
the accuracy of a model's generated answers for the subset of binary "yes/no" questions. "Free-form accuracy" is the accuracy
of a model's generated answers for the subset of open-ended questions. "Overall accuracy" is the accuracy of a model's generated
answers across all questions.
#### Languages
The question-answer pairs are in English.
## Dataset Structure
### Data Instances
Each instance consists of an image-question-answer triplet.
```
{
'image': <PIL.JpegImagePlugin.JpegImageFile image mode=CMYK size=309x272>,
'question': 'where are liver stem cells (oval cells) located?',
'answer': 'in the canals of hering'
}
```
### Data Fields
- `'image'`: the image referenced by the question-answer pair.
- `'question'`: the question about the image.
- `'answer'`: the expected answer.
### Data Splits
The dataset is split into training, validation and test. The split is provided directly by the authors.
| | Training Set | Validation Set | Test Set |
|-------------------------|:------------:|:--------------:|:--------:|
| QAs |19,654 |6,259 |6,719 |
| Images |2,599 |832 |858 |
## Additional Information
### Licensing Information
The authors have released the dataset under the [MIT License](https://github.com/UCSD-AI4H/PathVQA/blob/master/LICENSE).
### Citation Information
```
@article{he2020pathvqa,
title={PathVQA: 30000+ Questions for Medical Visual Question Answering},
author={He, Xuehai and Zhang, Yichen and Mou, Luntian and Xing, Eric and Xie, Pengtao},
journal={arXiv preprint arXiv:2003.10286},
year={2020}
}
``` |
Mutonix/RefGPT-Fact | ---
license: apache-2.0
dataset_info:
features:
- name: dialogue
dtype: string
- name: reference
dtype: string
- name: language
dtype: string
- name: type
dtype: string
splits:
- name: zh
num_bytes: 180760081
num_examples: 50000
- name: en
num_bytes: 464054853
num_examples: 50000
download_size: 260969665
dataset_size: 644814934
task_categories:
- conversational
language:
- zh
- en
arxiv: https://arxiv.org/abs/2305.14994
size_categories:
- 10K<n<100K
---
# Dataset Card for RefGPT-Fact
## Dataset Description
- **Homepage:**
- **Repository:** [https://github.com/ziliwangnlp/RefGPT](https://github.com/ziliwangnlp/RefGPT)
- **Paper:** [https://arxiv.org/abs/2305.14994](https://arxiv.org/abs/2305.14994)
- **Leaderboard:**
- **Point of Contact:**
### Dataset Summary
<p align="center">
<a href="https://arxiv.org/abs/2305.14994"><b>[Paper] RefGPT</b></a> |
<a href="https://github.com/ziliwangnlp/RefGPT"><b>[Github] RefGPT</b></a>
</p>
RefGPT-Fact is a dataset containing 100k multi-turn dialogues about factual knowledge, with 50k in English and 50k in Chinese. The English version uses the English Wikipedia as the reference and the Chinese version uses the frequently-used Chinese online encyclopedia website, Baidu Baike.
### Supported Tasks and Leaderboards
Chatbot instruction finetuning
### Languages
Chinese, English
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
Please pay attention that RefGPT Datasets, including RefGPT-Fact and RefGPT-Code, have not undergone manual verification, and as such, their security cannot be strictly guaranteed. Users should be aware that they are responsible for the results generated using this data.
### Discussion of Biases
As the datasets RefGPT-Fact and RefGPT-Code are collected using references such as Wikipedia and GitHub repositories, it cannot be avoided that the references themselves may contain factual errors, typos, or — in the case of GitHub repositories — bugs and malicious code. The datasets may also reflect the biases of the selected references and of the GPT-3.5/GPT-4 models.
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
```bibtex
@misc{yang2023refgpt,
title={RefGPT: Reference -> Truthful & Customized Dialogues Generation by GPTs and for GPTs},
author={Dongjie Yang and Ruifeng Yuan and YuanTao Fan and YiFei Yang and Zili Wang and Shusen Wang and Hai Zhao},
year={2023},
eprint={2305.14994},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
```
### Contributions
[More Information Needed] |
phamtungthuy/cauhoiphapluat_400tokenanswer | ---
dataset_info:
features:
- name: question
dtype: string
- name: answer
dtype: string
- name: field
dtype: string
splits:
- name: validation
num_bytes: 16155075
num_examples: 8808
- name: test
num_bytes: 32477322
num_examples: 17616
- name: train
num_bytes: 113686598
num_examples: 61684
download_size: 59968004
dataset_size: 162318995
configs:
- config_name: default
data_files:
- split: validation
path: data/validation-*
- split: test
path: data/test-*
- split: train
path: data/train-*
---
# Dataset Card for "cauhoiphapluat_400tokenanswer"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
nlp-brin-id/pos_pairs_filtered | ---
license: apache-2.0
---
|
jihye-moon/LawQA-Ko | ---
task_categories:
- conversational
language:
- ko
tags:
- legal
size_categories:
- 10K<n<100K
---
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
법률에 대한 질문과 답변으로 구성된 데이터셋입니다.
아래의 데이터셋에서 질문과 답변을 병합하여 Datasets를 만들었습니다.
| 정보 출처 | Dataset Page | Rows |
|---|---|---|
|[찾기쉬운생활법령정보 백문백답](https://www.easylaw.go.kr/CSP/OnhunqueansLstRetrieve.laf?search_put=)| [jiwoochris/easylaw_kr](https://huggingface.co/datasets/jiwoochris/easylaw_kr) | 2,195 rows |
|[대한법률구조공단 법률상담사례](https://www.klac.or.kr/legalinfo/counsel.do)| [jihye-moon/klac_legal_aid_counseling](https://huggingface.co/datasets/jihye-moon/klac_legal_aid_counseling) | 10,037 rows |
|[대한법률구조공단 사이버상담](https://www.klac.or.kr/legalstruct/cyberConsultation.do)| jihye-moon/klac_cyber_counseling (private Datasets) | 2,587 rows |
※ 위의 데이터는 모두 웹 페이지를 크롤링 하여 구축된 데이터입니다.
※ 대한법률구조공단 데이터는 크롤링 후, 전처리(공단 안내문구 삭제, 쿠션어 삭제 등)를 하였습니다.
adity1a/new_data | ---
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 3719569
num_examples: 1860
download_size: 2157912
dataset_size: 3719569
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Freed-Wu/kodak | ---
annotations_creators:
- no-annotation
language:
- en
language_creators:
- found
license:
- gpl-3.0
multilinguality:
- monolingual
pretty_name: kodak
size_categories:
- n<1K
source_datasets:
- original
tags: []
task_categories:
- other
task_ids: []
dataset_info:
features:
- name: image
dtype: image
splits:
- name: test
num_bytes: 15072
num_examples: 24
download_size: 15072
dataset_size: 15072
---
# Dataset Card for kodak
## Table of Contents
- [Table of Contents](#table-of-contents)
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Homepage:** <https://r0k.us/graphics/kodak/>
- **Repository:** <https://github.com/MohamedBakrAli/Kodak-Lossless-True-Color-Image-Suite>
- **Paper:**
- **Leaderboard:**
- **Point of Contact:**
### Dataset Summary
The pictures below link to lossless, true color (24 bits per pixel, aka "full
color") images. It is my understanding they have been released by the Eastman
Kodak Company for unrestricted usage. Many sites use them as a standard test
suite for compression testing, etc. Prior to this site, they were only
available in the Sun Raster format via ftp. This meant that the images could
not be previewed before downloading. Since their release, however, the lossless
PNG format has been incorporated into all the major browsers. Since PNG
supports 24-bit lossless color (which GIF and JPEG do not), it is now possible
to offer this browser-friendly access to the images.
### Supported Tasks and Leaderboards
- Image compression
### Languages
- en
## Dataset Structure
### Data Instances
- [![kodak01](https://r0k.us/graphics/kodak/thumbs/kodim01t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim01.png)
- [![kodak02](https://r0k.us/graphics/kodak/thumbs/kodim02t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim02.png)
- [![kodak03](https://r0k.us/graphics/kodak/thumbs/kodim03t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim03.png)
- [![kodak04](https://r0k.us/graphics/kodak/thumbs/kodim04t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim04.png)
- [![kodak05](https://r0k.us/graphics/kodak/thumbs/kodim05t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim05.png)
- [![kodak06](https://r0k.us/graphics/kodak/thumbs/kodim06t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim06.png)
- [![kodak07](https://r0k.us/graphics/kodak/thumbs/kodim07t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim07.png)
- [![kodak08](https://r0k.us/graphics/kodak/thumbs/kodim08t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim08.png)
- [![kodak09](https://r0k.us/graphics/kodak/thumbs/kodim09t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim09.png)
- [![kodak10](https://r0k.us/graphics/kodak/thumbs/kodim10t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim10.png)
- [![kodak11](https://r0k.us/graphics/kodak/thumbs/kodim11t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim11.png)
- [![kodak12](https://r0k.us/graphics/kodak/thumbs/kodim12t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim12.png)
- [![kodak13](https://r0k.us/graphics/kodak/thumbs/kodim13t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim13.png)
- [![kodak14](https://r0k.us/graphics/kodak/thumbs/kodim14t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim14.png)
- [![kodak15](https://r0k.us/graphics/kodak/thumbs/kodim15t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim15.png)
- [![kodak16](https://r0k.us/graphics/kodak/thumbs/kodim16t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim16.png)
- [![kodak17](https://r0k.us/graphics/kodak/thumbs/kodim17t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim17.png)
- [![kodak18](https://r0k.us/graphics/kodak/thumbs/kodim18t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim18.png)
- [![kodak19](https://r0k.us/graphics/kodak/thumbs/kodim19t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim19.png)
- [![kodak20](https://r0k.us/graphics/kodak/thumbs/kodim20t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim20.png)
- [![kodak21](https://r0k.us/graphics/kodak/thumbs/kodim21t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim21.png)
- [![kodak22](https://r0k.us/graphics/kodak/thumbs/kodim22t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim22.png)
- [![kodak23](https://r0k.us/graphics/kodak/thumbs/kodim23t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim23.png)
- [![kodak24](https://r0k.us/graphics/kodak/thumbs/kodim24t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim24.png)
### Data Fields
### Data Splits
## Dataset Creation
### Curation Rationale
### Source Data
#### Initial Data Collection and Normalization
#### Who are the source language producers?
<https://www.kodak.com>
### Annotations
#### Annotation process
#### Who are the annotators?
### Personal and Sensitive Information
## Considerations for Using the Data
### Social Impact of Dataset
### Discussion of Biases
### Other Known Limitations
## Additional Information
### Dataset Curators
### Licensing Information
[LICENSE](LICENSE)
### Citation Information
### Contributions
Thanks to [@Freed-Wu](https://github.com/Freed-Wu) for adding this dataset.
|
GEM/ART | ---
annotations_creators:
- automatically-created
language_creators:
- unknown
language:
- en
license:
- apache-2.0
multilinguality:
- unknown
size_categories:
- unknown
source_datasets:
- original
task_categories:
- other
task_ids: []
pretty_name: ART
tags:
- reasoning
---
# Dataset Card for GEM/ART
## Dataset Description
- **Homepage:** http://abductivecommonsense.xyz/
- **Repository:** https://storage.googleapis.com/ai2-mosaic/public/abductive-commonsense-reasoning-iclr2020/anlg.zip
- **Paper:** https://openreview.net/pdf?id=Byg1v1HKDB
- **Leaderboard:** N/A
- **Point of Contact:** Chandra Bhagavatulla
### Link to Main Data Card
You can find the main data card on the [GEM Website](https://gem-benchmark.com/data_cards/ART).
### Dataset Summary
Abductive reasoning is inference to the most plausible explanation. For example, if Jenny finds her house in a mess when she returns from work, and remembers that she left a window open, she can hypothesize that a thief broke into her house and caused the mess, as the most plausible explanation.
This data loader focuses on abductive NLG: a conditional English generation task for explaining given observations in natural language.
You can load the dataset via:
```
import datasets
data = datasets.load_dataset('GEM/ART')
```
The data loader can be found [here](https://huggingface.co/datasets/GEM/ART).
#### website
[Website](http://abductivecommonsense.xyz/)
#### paper
[OpenReview](https://openreview.net/pdf?id=Byg1v1HKDB)
#### authors
Chandra Bhagavatula (AI2), Ronan Le Bras (AI2), Chaitanya Malaviya (AI2), Keisuke Sakaguchi (AI2), Ari Holtzman (AI2, UW), Hannah Rashkin (AI2, UW), Doug Downey (AI2), Wen-tau Yih (AI2), Yejin Choi (AI2, UW)
## Dataset Overview
### Where to find the Data and its Documentation
#### Webpage
<!-- info: What is the webpage for the dataset (if it exists)? -->
<!-- scope: telescope -->
[Website](http://abductivecommonsense.xyz/)
#### Download
<!-- info: What is the link to where the original dataset is hosted? -->
<!-- scope: telescope -->
[Google Storage](https://storage.googleapis.com/ai2-mosaic/public/abductive-commonsense-reasoning-iclr2020/anlg.zip)
#### Paper
<!-- info: What is the link to the paper describing the dataset (open access preferred)? -->
<!-- scope: telescope -->
[OpenReview](https://openreview.net/pdf?id=Byg1v1HKDB)
#### BibTex
<!-- info: Provide the BibTex-formatted reference for the dataset. Please use the correct published version (ACL anthology, etc.) instead of google scholar created Bibtex. -->
<!-- scope: microscope -->
```
@inproceedings{
Bhagavatula2020Abductive,
title={Abductive Commonsense Reasoning},
author={Chandra Bhagavatula and Ronan Le Bras and Chaitanya Malaviya and Keisuke Sakaguchi and Ari Holtzman and Hannah Rashkin and Doug Downey and Wen-tau Yih and Yejin Choi},
booktitle={International Conference on Learning Representations},
year={2020},
url={https://openreview.net/forum?id=Byg1v1HKDB}
}
```
#### Contact Name
<!-- quick -->
<!-- info: If known, provide the name of at least one person the reader can contact for questions about the dataset. -->
<!-- scope: periscope -->
Chandra Bhagavatula
#### Contact Email
<!-- info: If known, provide the email of at least one person the reader can contact for questions about the dataset. -->
<!-- scope: periscope -->
chandrab@allenai.org
#### Has a Leaderboard?
<!-- info: Does the dataset have an active leaderboard? -->
<!-- scope: telescope -->
no
### Languages and Intended Use
#### Multilingual?
<!-- quick -->
<!-- info: Is the dataset multilingual? -->
<!-- scope: telescope -->
no
#### Covered Languages
<!-- quick -->
<!-- info: What languages/dialects are covered in the dataset? -->
<!-- scope: telescope -->
`English`
#### Whose Language?
<!-- info: Whose language is in the dataset? -->
<!-- scope: periscope -->
Crowdworkers on the Amazon Mechanical Turk platform based in the U.S., Canada, U.K., and Australia.
#### License
<!-- quick -->
<!-- info: What is the license of the dataset? -->
<!-- scope: telescope -->
apache-2.0: Apache License 2.0
#### Intended Use
<!-- info: What is the intended use of the dataset? -->
<!-- scope: microscope -->
To study the viability of language-based abductive reasoning. Training and evaluating models to generate a plausible hypothesis to explain two given observations.
#### Primary Task
<!-- info: What primary task does the dataset support? -->
<!-- scope: telescope -->
Reasoning
### Credit
#### Curation Organization Type(s)
<!-- info: In what kind of organization did the dataset curation happen? -->
<!-- scope: telescope -->
`industry`
#### Curation Organization(s)
<!-- info: Name the organization(s). -->
<!-- scope: periscope -->
Allen Institute for AI
#### Dataset Creators
<!-- info: Who created the original dataset? List the people involved in collecting the dataset and their affiliation(s). -->
<!-- scope: microscope -->
Chandra Bhagavatula (AI2), Ronan Le Bras (AI2), Chaitanya Malaviya (AI2), Keisuke Sakaguchi (AI2), Ari Holtzman (AI2, UW), Hannah Rashkin (AI2, UW), Doug Downey (AI2), Wen-tau Yih (AI2), Yejin Choi (AI2, UW)
#### Funding
<!-- info: Who funded the data creation? -->
<!-- scope: microscope -->
Allen Institute for AI
#### Who added the Dataset to GEM?
<!-- info: Who contributed to the data card and adding the dataset to GEM? List the people+affiliations involved in creating this data card and who helped integrate this dataset into GEM. -->
<!-- scope: microscope -->
Chandra Bhagavatula (AI2), Ronan LeBras (AI2), Aman Madaan (CMU), Nico Daheim (RWTH Aachen University)
### Dataset Structure
#### Data Fields
<!-- info: List and describe the fields present in the dataset. -->
<!-- scope: telescope -->
- `observation_1`: A string describing an observation / event.
- `observation_2`: A string describing an observation / event.
- `label`: A string that plausibly explains why observation_1 and observation_2 might have happened.
#### How were labels chosen?
<!-- info: How were the labels chosen? -->
<!-- scope: microscope -->
Explanations were authored by crowdworkers on the Amazon Mechanical Turk platform using a custom template designed by the creators of the dataset.
#### Example Instance
<!-- info: Provide a JSON formatted example of a typical instance in the dataset. -->
<!-- scope: periscope -->
```
{
'gem_id': 'GEM-ART-validation-0',
'observation_1': 'Stephen was at a party.',
'observation_2': 'He checked it but it was completely broken.',
'label': 'Stephen knocked over a vase while drunk.'
}
```
#### Data Splits
<!-- info: Describe and name the splits in the dataset if there are more than one. -->
<!-- scope: periscope -->
- `train`: Consists of training instances.
- `dev`: Consists of dev instances.
- `test`: Consists of test instances.
## Dataset in GEM
### Rationale for Inclusion in GEM
#### Why is the Dataset in GEM?
<!-- info: What does this dataset contribute toward better generation evaluation and why is it part of GEM? -->
<!-- scope: microscope -->
Abductive reasoning is a crucial capability of humans and ART is the first dataset curated to study language-based abductive reasoning.
#### Similar Datasets
<!-- info: Do other datasets for the high level task exist? -->
<!-- scope: telescope -->
no
#### Ability that the Dataset measures
<!-- info: What aspect of model ability can be measured with this dataset? -->
<!-- scope: periscope -->
Whether models can reason abductively about a given pair of observations.
### GEM-Specific Curation
#### Modified for GEM?
<!-- info: Has the GEM version of the dataset been modified in any way (data, processing, splits) from the original curated data? -->
<!-- scope: telescope -->
no
#### Additional Splits?
<!-- info: Does GEM provide additional splits to the dataset? -->
<!-- scope: telescope -->
no
### Getting Started with the Task
#### Pointers to Resources
<!-- info: Getting started with in-depth research on the task. Add relevant pointers to resources that researchers can consult when they want to get started digging deeper into the task. -->
<!-- scope: microscope -->
- [Paper](https://arxiv.org/abs/1908.05739)
- [Code](https://github.com/allenai/abductive-commonsense-reasoning)
## Previous Results
### Previous Results
#### Measured Model Abilities
<!-- info: What aspect of model ability can be measured with this dataset? -->
<!-- scope: telescope -->
Whether models can reason abductively about a given pair of observations.
#### Metrics
<!-- info: What metrics are typically used for this task? -->
<!-- scope: periscope -->
`BLEU`, `BERT-Score`, `ROUGE`
#### Previous results available?
<!-- info: Are previous results available? -->
<!-- scope: telescope -->
no
## Dataset Curation
### Original Curation
#### Sourced from Different Sources
<!-- info: Is the dataset aggregated from different data sources? -->
<!-- scope: telescope -->
no
### Language Data
#### How was Language Data Obtained?
<!-- info: How was the language data obtained? -->
<!-- scope: telescope -->
`Crowdsourced`
#### Where was it crowdsourced?
<!-- info: If crowdsourced, where from? -->
<!-- scope: periscope -->
`Amazon Mechanical Turk`
#### Language Producers
<!-- info: What further information do we have on the language producers? -->
<!-- scope: microscope -->
Language producers were English speakers in the U.S., Canada, U.K., and Australia.
#### Topics Covered
<!-- info: Does the language in the dataset focus on specific topics? How would you describe them? -->
<!-- scope: periscope -->
No
#### Data Validation
<!-- info: Was the text validated by a different worker or a data curator? -->
<!-- scope: telescope -->
validated by crowdworker
#### Was Data Filtered?
<!-- info: Were text instances selected or filtered? -->
<!-- scope: telescope -->
algorithmically
#### Filter Criteria
<!-- info: What were the selection criteria? -->
<!-- scope: microscope -->
Adversarial filtering algorithm as described in the [paper](https://arxiv.org/abs/1908.05739)
### Structured Annotations
#### Additional Annotations?
<!-- quick -->
<!-- info: Does the dataset have additional annotations for each instance? -->
<!-- scope: telescope -->
automatically created
#### Annotation Service?
<!-- info: Was an annotation service used? -->
<!-- scope: telescope -->
no
#### Annotation Values
<!-- info: Purpose and values for each annotation -->
<!-- scope: microscope -->
Each observation is associated with a list of COMET (https://arxiv.org/abs/1906.05317) inferences.
#### Any Quality Control?
<!-- info: Quality control measures? -->
<!-- scope: telescope -->
none
### Consent
#### Any Consent Policy?
<!-- info: Was there a consent policy involved when gathering the data? -->
<!-- scope: telescope -->
no
### Private Identifying Information (PII)
#### Contains PII?
<!-- quick -->
<!-- info: Does the source language data likely contain Personal Identifying Information about the data creators or subjects? -->
<!-- scope: telescope -->
no PII
#### Justification for no PII
<!-- info: Provide a justification for selecting `no PII` above. -->
<!-- scope: periscope -->
The dataset contains day-to-day events. It does not contain names, emails, addresses etc.
### Maintenance
#### Any Maintenance Plan?
<!-- info: Does the original dataset have a maintenance plan? -->
<!-- scope: telescope -->
no
## Broader Social Context
### Previous Work on the Social Impact of the Dataset
#### Usage of Models based on the Data
<!-- info: Are you aware of cases where models trained on the task featured in this dataset or related tasks have been used in automated systems? -->
<!-- scope: telescope -->
no
### Impact on Under-Served Communities
#### Addresses needs of underserved Communities?
<!-- info: Does this dataset address the needs of communities that are traditionally underserved in language technology, and particularly language generation technology? Communities may be underserved for example because their language, language variety, or social or geographical context is underrepresented in NLP and NLG resources (datasets and models). -->
<!-- scope: telescope -->
no
### Discussion of Biases
#### Any Documented Social Biases?
<!-- info: Are there documented social biases in the dataset? Biases in this context are variations in the ways members of different social categories are represented that can have harmful downstream consequences for members of the more disadvantaged group. -->
<!-- scope: telescope -->
no
## Considerations for Using the Data
### PII Risks and Liability
#### Potential PII Risk
<!-- info: Considering your answers to the PII part of the Data Curation Section, describe any potential privacy to the data subjects and creators risks when using the dataset. -->
<!-- scope: microscope -->
None
### Licenses
#### Copyright Restrictions on the Dataset
<!-- info: Based on your answers in the Intended Use part of the Data Overview Section, which of the following best describe the copyright and licensing status of the dataset? -->
<!-- scope: periscope -->
`public domain`
#### Copyright Restrictions on the Language Data
<!-- info: Based on your answers in the Language part of the Data Curation Section, which of the following best describe the copyright and licensing status of the underlying language data? -->
<!-- scope: periscope -->
`public domain`
### Known Technical Limitations
|
Deojoandco/capstone_hal_without_gold | ---
dataset_info:
features:
- name: dialog_id
dtype: int32
- name: source
sequence: string
- name: tags
sequence:
class_label:
names:
'0': C
'1': M
'2': N
'3': O
'4': OB
'5': W
splits:
- name: train
num_bytes: 239933
num_examples: 76
- name: validation
num_bytes: 47958
num_examples: 12
- name: test
num_bytes: 27286
num_examples: 12
download_size: 35488
dataset_size: 315177
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
---
# Dataset Card for "capstone_hal_without_gold"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
MatrixStudio/Codeforces-Python-Submissions-PPO | ---
dataset_info:
features:
- name: contestId
dtype: int64
- name: index
dtype: string
- name: name
dtype: string
- name: type
dtype: string
- name: rating
dtype: int64
- name: tags
sequence: string
- name: title
dtype: string
- name: time-limit
dtype: string
- name: memory-limit
dtype: string
- name: problem-description
dtype: string
- name: input-specification
dtype: string
- name: output-specification
dtype: string
- name: demo-input
sequence: string
- name: demo-output
sequence: string
- name: note
dtype: string
- name: points
dtype: float64
- name: test_cases
list:
- name: input
dtype: string
- name: output
dtype: string
- name: creationTimeSeconds
dtype: int64
- name: relativeTimeSeconds
dtype: int64
- name: programmingLanguage
dtype: string
- name: verdict
dtype: string
- name: testset
dtype: string
- name: passedTestCount
dtype: int64
- name: timeConsumedMillis
dtype: int64
- name: memoryConsumedBytes
dtype: int64
- name: code
dtype: string
- name: prompt
dtype: string
- name: response
dtype: string
- name: score
dtype: float64
splits:
- name: train
num_bytes: 326352255.9446646
num_examples: 49021
- name: test
num_bytes: 41407414
num_examples: 6115
download_size: 49192265
dataset_size: 367759669.9446646
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
|
openaccess-ai-collective/43ced50688ae8a59dd5c38ab6d36f7f9 | Invalid username or password. |
tmuzaffarmydost/data-parsing-new-dataset-v2-updated-labels | ---
dataset_info:
features:
- name: image
dtype: image
- name: ground_truth
struct:
- name: gt_parse
struct:
- name: CustomerCompanyAddress
dtype: string
- name: CustomerCompanyName
dtype: string
- name: CustomerCompanyID
dtype: string
- name: VendorCompanyAddress
dtype: string
- name: VendorCompanyName
dtype: string
- name: VendorCompanyID
dtype: string
- name: InvoiceID
dtype: string
- name: InvoiceDate
dtype: string
- name: TotalAmount
dtype: string
- name: TotalTax
dtype: string
- name: Items-table-general/0/Description
dtype: string
- name: Items-table-general/0/Amount
dtype: string
- name: Items-table-general/0/VAT %
dtype: string
- name: TotalwithoutTax
dtype: string
- name: VAT %
dtype: string
- name: DueDate
dtype: string
- name: Items-table-general/0/Reference~1Code
dtype: string
- name: Items-table-general/0/Quantity
dtype: string
- name: Items-table-general/0/UnitPrice
dtype: string
- name: Currency
dtype: string
- name: WithholdingTax
dtype: string
- name: taxes-table/0/Base-Amount
dtype: string
- name: taxes-table/0/VAT%
dtype: string
- name: taxes-table/0/VAT
dtype: string
- name: Items-table-general/1/Quantity
dtype: string
- name: Items-table-general/1/Amount
dtype: string
- name: Items-table-general/1/UnitPrice
dtype: string
- name: Items-table-general/2/Quantity
dtype: string
- name: Items-table-general/2/Amount
dtype: string
- name: Items-table-general/2/UnitPrice
dtype: string
- name: Items-table-general/0/DeliveryNote
dtype: string
- name: Items-table-general/1/DeliveryNote
dtype: string
- name: Items-table-general/2/DeliveryNote
dtype: string
- name: Items-table-general/1/Description
dtype: string
- name: Items-table-general/2/Description
dtype: string
- name: Items-table-general/0/VAT
dtype: string
- name: Items-table-general/0/SubTotalAmount
dtype: string
- name: Items-table-general/1/Reference~1Code
dtype: string
- name: Items-table-general/2/Reference~1Code
dtype: string
- name: Items-table-general/2/Dto %
dtype: string
- name: Items-table-general/1/VAT %
dtype: string
- name: Items-table-general/2/VAT %
dtype: string
- name: Items-table-general/3/Reference~1Code
dtype: string
- name: Items-table-general/3/Description
dtype: string
- name: Items-table-general/3/Quantity
dtype: string
- name: Items-table-general/3/UnitPrice
dtype: string
- name: Items-table-general/3/Amount
dtype: string
- name: Items-table-general/4/Reference~1Code
dtype: string
- name: Items-table-general/4/Description
dtype: string
- name: Items-table-general/4/Quantity
dtype: string
- name: Items-table-general/4/UnitPrice
dtype: string
- name: Items-table-general/4/Dto %
dtype: string
- name: Items-table-general/4/Amount
dtype: string
- name: Items-table-general/3/VAT %
dtype: string
- name: Items-table-general/4/VAT %
dtype: string
- name: Items-table-general/5/Reference~1Code
dtype: string
- name: Items-table-general/5/Description
dtype: string
- name: Items-table-general/5/Quantity
dtype: string
- name: Items-table-general/5/Amount
dtype: string
- name: Items-table-general/5/VAT %
dtype: string
- name: Items-table-general/6/Reference~1Code
dtype: string
- name: Items-table-general/6/Description
dtype: string
- name: Items-table-general/6/Quantity
dtype: string
- name: Items-table-general/6/Amount
dtype: string
- name: Items-table-general/6/VAT %
dtype: string
- name: Items-table-general/7/Reference~1Code
dtype: string
- name: Items-table-general/7/Description
dtype: string
- name: Items-table-general/7/Quantity
dtype: string
- name: Items-table-general/7/Amount
dtype: string
- name: Items-table-general/7/VAT %
dtype: string
- name: Items-table-general/8/Reference~1Code
dtype: string
- name: Items-table-general/8/Description
dtype: string
- name: Items-table-general/8/Quantity
dtype: string
- name: Items-table-general/8/Amount
dtype: string
- name: Items-table-general/8/VAT %
dtype: string
- name: Items-table-general/3/DeliveryNote
dtype: string
- name: Items-table-general/5/DeliveryNote
dtype: string
- name: Items-table-general/7/DeliveryNote
dtype: string
- name: Items-table-general/8/DeliveryNote
dtype: string
- name: Items-table-general/7/Dto %
dtype: string
- name: Items-table-general/5/UnitPrice
dtype: string
- name: Items-table-general/6/UnitPrice
dtype: string
- name: Items-table-general/7/UnitPrice
dtype: string
- name: Items-table-general/8/UnitPrice
dtype: string
- name: PONumber
dtype: string
- name: DeliveryNote
dtype: string
- name: taxes-table/1/Base-Amount
dtype: string
- name: taxes-table/1/VAT%
dtype: string
- name: taxes-table/1/VAT
dtype: string
- name: Items-table-general/0/PONumber
dtype: string
- name: Items-table-general/9/Reference~1Code
dtype: string
- name: Items-table-general/9/Description
dtype: string
- name: Items-table-general/9/Quantity
dtype: string
- name: Items-table-general/9/Amount
dtype: string
- name: Items-table-general/9/VAT %
dtype: string
- name: Items-table-general/10/Reference~1Code
dtype: string
- name: Items-table-general/10/Description
dtype: string
- name: Items-table-general/10/Quantity
dtype: string
- name: Items-table-general/10/Amount
dtype: string
- name: Items-table-general/10/VAT %
dtype: string
- name: Items-table-general/10/DeliveryNote
dtype: string
- name: Items-table-general/10/UnitPrice
dtype: string
- name: Items-table-general/9/UnitPrice
dtype: string
- name: Items-table-general/1/Dto %
dtype: string
- name: Items-table-general/3/Dto %
dtype: string
- name: Items-table-general/5/Dto %
dtype: string
- name: Items-table-general/0/Dto %
dtype: string
- name: Items-table-general/6/DeliveryNote
dtype: string
- name: Items-table-general/4/DeliveryNote
dtype: string
- name: meta
struct:
- name: version
dtype: string
- name: split
dtype: string
- name: image_id
dtype: int64
- name: image_size
struct:
- name: width
dtype: int64
- name: height
dtype: int64
- name: valid_line
sequence: 'null'
splits:
- name: train
num_bytes: 293897792.0
num_examples: 146
download_size: 31170758
dataset_size: 293897792.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "data-parsing-new-dataset-v2-updated-labels"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
patelmiteshn/llama2_chat_datasetformat | ---
license: apache-2.0
---
|
AdapterOcean/oasst_top1_standardized_cluster_1_alpaca | ---
dataset_info:
features:
- name: input
dtype: string
- name: output
dtype: string
splits:
- name: train
num_bytes: 7189806
num_examples: 4949
download_size: 4218122
dataset_size: 7189806
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "oasst_top1_standardized_cluster_1_alpaca"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
nateraw/fuego-20230209-003125-8f59a7 | ---
tags:
- fuego
fuego:
id: 20230209-003125-8f59a7
status: done
script: run_glue.py
requirements_file: requirements.txt
space_id: nateraw/fuego-20230209-003125-8f59a7
space_hardware: cpu-basic
github_repo_id: huggingface/transformers
github_repo_branch: main
github_repo_sha: c35bb6de547f8839434c3d5772777c699e9595de
---
|
farazeftekhar/geojson | ---
license: other
---
|
Tsuinzues/cristianotorreao | ---
license: openrail
---
|
Saxo/en_ko_translation_tech_science_linkbricks_single_dataset_with_prompt_text_huggingface_sampled | ---
license: apache-2.0
---
|
fandave/mateus | ---
license: openrail
---
|