datasetId
stringlengths 5
121
| author
stringlengths 2
42
| last_modified
unknown | downloads
int64 0
2.54M
| likes
int64 0
6.27k
| tags
sequencelengths 1
7.92k
| task_categories
sequencelengths 0
40
⌀ | createdAt
unknown | card
stringlengths 19
1.01M
|
---|---|---|---|---|---|---|---|---|
yaygomii/Tamil-Speech-Dialect-Corpus-Shuffled-Split | yaygomii | "2024-02-04T03:49:26Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-04T03:47:04Z" | ---
dataset_info:
features:
- name: label
dtype: string
- name: audio
dtype: audio
- name: sentence
dtype: string
splits:
- name: train
num_bytes: 2441162650.9392853
num_examples: 8649
- name: test
num_bytes: 305392291.844953
num_examples: 1082
- name: valid
num_bytes: 305109988.75476176
num_examples: 1081
download_size: 2872546550
dataset_size: 3051664931.539
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
- split: valid
path: data/valid-*
---
|
Henriettaisme/123 | Henriettaisme | "2024-02-04T03:54:17Z" | 33 | 0 | [
"license:apache-2.0",
"region:us"
] | null | "2024-02-04T03:54:16Z" | ---
license: apache-2.0
---
|
Jiayao/UEGPT-Datasets | Jiayao | "2024-02-04T06:07:17Z" | 33 | 0 | [
"license:apache-2.0",
"region:us"
] | null | "2024-02-04T06:07:17Z" | ---
license: apache-2.0
---
|
empiricist/philosophy_chat | empiricist | "2024-02-04T07:45:14Z" | 33 | 2 | [
"license:mit",
"size_categories:100K<n<1M",
"format:csv",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-04T07:36:32Z" | ---
license: mit
---
|
mtc/xnli_de_sub_sampled_3000_with_all_gpt-3-5_explanations | mtc | "2024-02-04T09:15:19Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-04T09:15:16Z" | ---
dataset_info:
features:
- name: premise
dtype: string
- name: hypothesis
dtype: string
- name: label
dtype:
class_label:
names:
'0': entailment
'1': neutral
'2': contradiction
- name: explanation
dtype: string
splits:
- name: train
num_bytes: 1343062
num_examples: 3000
- name: validation
num_bytes: 504564
num_examples: 2490
- name: test
num_bytes: 1016528
num_examples: 5010
download_size: 1260547
dataset_size: 2864154
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
---
|
Nexdata/PPT_OCR_Data_of_8_Languages | Nexdata | "2024-08-05T03:27:32Z" | 33 | 1 | [
"license:cc-by-nc-4.0",
"size_categories:n<1K",
"format:imagefolder",
"modality:image",
"library:datasets",
"library:mlcroissant",
"region:us"
] | null | "2024-02-04T09:28:23Z" | ---
license: cc-by-nc-4.0
---
## Description
14,980 Images PPT OCR Data of 8 Languages. This dataset includes 8 languages, multiple scenes, different photographic angles, different photographic distances, different light conditions. For annotation, line-level quadrilateral bounding box annotation and transcription for the texts were annotated in the data. The dataset can be used for tasks such as OCR of multi-language.
For more details, please refer to the link: https://www.nexdata.ai/datasets/ocr/979?source=Huggingface
## Data size
14,980 images, 8 languages
## Data environment
including meeting room, conference room
## Language types
French, Korean, Japanese, Spanish, German, Italian, Portuguese and Russian
## Data diversity
multiple scenes, multiple languages, different photographic angles, different photographic distances, different light conditions
## Device
cellphone
## Collecting angles
front, left, right, looking up angle
## Data format
the image data format is .jpg, the annotation file data format is .json
## Annotation content
line-level quadrilateral bounding box annotation and transcription for the texts
## Accuracy
the error bound of each vertex of quadrilateral bounding box is within 5 pixels, which is a qualified annotation, the accuracy of bounding boxes is not less than 95%; the texts transcription accuracy is not less than 95%
# Licensing Information
Commercial License |
chathuranga-jayanath/context-5-finmath-times4j-html-mavendoxia-wro4j-guava-supercsv-len-1000-prompt-2 | chathuranga-jayanath | "2024-02-04T11:29:14Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-04T11:29:01Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: filepath
dtype: string
- name: start_bug_line
dtype: int64
- name: end_bug_line
dtype: int64
- name: bug
dtype: string
- name: fix
dtype: string
- name: ctx
dtype: string
splits:
- name: train
num_bytes: 5510268
num_examples: 5105
- name: validation
num_bytes: 672859
num_examples: 637
- name: test
num_bytes: 672684
num_examples: 637
download_size: 2623280
dataset_size: 6855811
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
---
|
StellarHouse/RequestDisect | StellarHouse | "2024-02-04T14:22:57Z" | 33 | 0 | [
"license:mit",
"region:us"
] | null | "2024-02-04T14:22:57Z" | ---
license: mit
---
|
DataStudio/OCRWordLevelClear_06 | DataStudio | "2024-02-04T14:53:57Z" | 33 | 0 | [
"size_categories:1M<n<10M",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-04T14:40:24Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 5898077007.076
num_examples: 1034148
download_size: 5779110760
dataset_size: 5898077007.076
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
IanTseng/Med-term3 | IanTseng | "2024-02-05T06:20:40Z" | 33 | 0 | [
"size_categories:1M<n<10M",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-04T14:53:16Z" | ---
dataset_info:
features:
- name: TEXT
dtype: string
- name: LOCATION
dtype: string
- name: LABEL
dtype: string
splits:
- name: train
num_bytes: 2433641009
num_examples: 2393619
download_size: 1381318742
dataset_size: 2433641009
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
McSpicyWithMilo/target-elements-0.3split | McSpicyWithMilo | "2024-02-04T15:17:48Z" | 33 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-04T15:17:40Z" | ---
dataset_info:
features:
- name: instruction
dtype: string
- name: target_element
dtype: string
- name: instruction_type
dtype: string
splits:
- name: train
num_bytes: 31885.0
num_examples: 280
- name: test
num_bytes: 13665.0
num_examples: 120
download_size: 24258
dataset_size: 45550.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
# Dataset Card for "target-elements-0.3split"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
paperplaneflyr/recepies_reduced_3.0_1K | paperplaneflyr | "2024-02-04T16:25:09Z" | 33 | 0 | [
"license:mit",
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-04T15:28:53Z" | ---
license: mit
---
|
tyzhu/random25eof_find_passage_train5000_eval1000_rare | tyzhu | "2024-02-04T15:44:33Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-04T15:31:01Z" | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
dataset_info:
features:
- name: inputs
dtype: string
- name: targets
dtype: string
splits:
- name: train
num_bytes: 1132624
num_examples: 11000
- name: validation
num_bytes: 118222
num_examples: 1000
download_size: 452523
dataset_size: 1250846
---
# Dataset Card for "random25eof_find_passage_train5000_eval1000_rare"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
tyzhu/random25eof_find_passage_train50000000_eval1000_rare | tyzhu | "2024-02-04T15:47:37Z" | 33 | 0 | [
"size_categories:100M<n<1B",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-04T15:35:57Z" | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
dataset_info:
features:
- name: inputs
dtype: string
- name: targets
dtype: string
splits:
- name: train
num_bytes: 10421821954
num_examples: 100001000
- name: validation
num_bytes: 118222
num_examples: 1000
download_size: 0
dataset_size: 10421940176
---
# Dataset Card for "random25eof_find_passage_train50000000_eval1000_rare"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
MarkrAI/multilingual | MarkrAI | "2024-02-04T17:04:05Z" | 33 | 0 | [
"license:cc-by-nc-sa-4.0",
"region:us"
] | null | "2024-02-04T16:49:00Z" | ---
license: cc-by-nc-sa-4.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
dataset_info:
features:
- name: text
dtype: string
- name: doc_id
dtype: string
splits:
- name: train
num_bytes: 7214103845
num_examples: 23234246
download_size: 3703431865
dataset_size: 7214103845
---
|
Mohamad-Jaallouk/ConstSceneSnow | Mohamad-Jaallouk | "2024-02-04T16:55:07Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-04T16:54:29Z" | ---
dataset_info:
features:
- name: pixel_values
dtype: image
- name: label
dtype: image
splits:
- name: train
num_bytes: 779226145.9
num_examples: 5540
- name: validation
num_bytes: 97711471.0
num_examples: 704
- name: test
num_bytes: 61079886.0
num_examples: 348
download_size: 931981436
dataset_size: 938017502.9
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
---
|
P1ot3r/cv-train-pl-whisper-small | P1ot3r | "2024-02-04T17:05:46Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-04T16:59:16Z" | ---
dataset_info:
features:
- name: input_features
sequence:
sequence: float32
- name: labels
sequence: int64
splits:
- name: train
num_bytes: 15886403552
num_examples: 16539
download_size: 3037146801
dataset_size: 15886403552
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
open-llm-leaderboard-old/details_gmonsoon__MiaAffogato-Indo-Mistral-7b | open-llm-leaderboard-old | "2024-02-04T17:36:47Z" | 33 | 0 | [
"region:us"
] | null | "2024-02-04T17:36:25Z" | ---
pretty_name: Evaluation run of gmonsoon/MiaAffogato-Indo-Mistral-7b
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [gmonsoon/MiaAffogato-Indo-Mistral-7b](https://huggingface.co/gmonsoon/MiaAffogato-Indo-Mistral-7b)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_gmonsoon__MiaAffogato-Indo-Mistral-7b\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2024-02-04T17:34:06.201391](https://huggingface.co/datasets/open-llm-leaderboard/details_gmonsoon__MiaAffogato-Indo-Mistral-7b/blob/main/results_2024-02-04T17-34-06.201391.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.644912685809928,\n\
\ \"acc_stderr\": 0.03209671177294616,\n \"acc_norm\": 0.6450526633312276,\n\
\ \"acc_norm_stderr\": 0.032762897968506996,\n \"mc1\": 0.4186046511627907,\n\
\ \"mc1_stderr\": 0.017270015284476855,\n \"mc2\": 0.5818075900070901,\n\
\ \"mc2_stderr\": 0.015361574502931194\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.6322525597269625,\n \"acc_stderr\": 0.014090995618168477,\n\
\ \"acc_norm\": 0.6638225255972696,\n \"acc_norm_stderr\": 0.013804855026205761\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6585341565425215,\n\
\ \"acc_stderr\": 0.004732322172153749,\n \"acc_norm\": 0.8543118900617407,\n\
\ \"acc_norm_stderr\": 0.0035207225053320934\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \
\ \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6370370370370371,\n\
\ \"acc_stderr\": 0.041539484047423976,\n \"acc_norm\": 0.6370370370370371,\n\
\ \"acc_norm_stderr\": 0.041539484047423976\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.7171052631578947,\n \"acc_stderr\": 0.03665349695640767,\n\
\ \"acc_norm\": 0.7171052631578947,\n \"acc_norm_stderr\": 0.03665349695640767\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.61,\n\
\ \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \
\ \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.720754716981132,\n \"acc_stderr\": 0.027611163402399715,\n\
\ \"acc_norm\": 0.720754716981132,\n \"acc_norm_stderr\": 0.027611163402399715\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7222222222222222,\n\
\ \"acc_stderr\": 0.037455547914624555,\n \"acc_norm\": 0.7222222222222222,\n\
\ \"acc_norm_stderr\": 0.037455547914624555\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \
\ \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\
acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\"\
: 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720683,\n \
\ \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720683\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.630057803468208,\n\
\ \"acc_stderr\": 0.0368122963339432,\n \"acc_norm\": 0.630057803468208,\n\
\ \"acc_norm_stderr\": 0.0368122963339432\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.048971049527263666,\n\
\ \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.048971049527263666\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.77,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.77,\n\
\ \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.574468085106383,\n \"acc_stderr\": 0.03232146916224469,\n\
\ \"acc_norm\": 0.574468085106383,\n \"acc_norm_stderr\": 0.03232146916224469\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4649122807017544,\n\
\ \"acc_stderr\": 0.046920083813689104,\n \"acc_norm\": 0.4649122807017544,\n\
\ \"acc_norm_stderr\": 0.046920083813689104\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5655172413793104,\n \"acc_stderr\": 0.04130740879555498,\n\
\ \"acc_norm\": 0.5655172413793104,\n \"acc_norm_stderr\": 0.04130740879555498\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.4021164021164021,\n \"acc_stderr\": 0.025253032554997692,\n \"\
acc_norm\": 0.4021164021164021,\n \"acc_norm_stderr\": 0.025253032554997692\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.49206349206349204,\n\
\ \"acc_stderr\": 0.044715725362943486,\n \"acc_norm\": 0.49206349206349204,\n\
\ \"acc_norm_stderr\": 0.044715725362943486\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \
\ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\
: 0.7677419354838709,\n \"acc_stderr\": 0.024022256130308235,\n \"\
acc_norm\": 0.7677419354838709,\n \"acc_norm_stderr\": 0.024022256130308235\n\
\ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\
: 0.4975369458128079,\n \"acc_stderr\": 0.03517945038691063,\n \"\
acc_norm\": 0.4975369458128079,\n \"acc_norm_stderr\": 0.03517945038691063\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\"\
: 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.032568666616811015,\n\
\ \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.032568666616811015\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.8080808080808081,\n \"acc_stderr\": 0.02805779167298902,\n \"\
acc_norm\": 0.8080808080808081,\n \"acc_norm_stderr\": 0.02805779167298902\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.8808290155440415,\n \"acc_stderr\": 0.023381935348121437,\n\
\ \"acc_norm\": 0.8808290155440415,\n \"acc_norm_stderr\": 0.023381935348121437\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.023901157979402534,\n\
\ \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.023901157979402534\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.3074074074074074,\n \"acc_stderr\": 0.028133252578815632,\n \
\ \"acc_norm\": 0.3074074074074074,\n \"acc_norm_stderr\": 0.028133252578815632\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.6638655462184874,\n \"acc_stderr\": 0.030684737115135353,\n\
\ \"acc_norm\": 0.6638655462184874,\n \"acc_norm_stderr\": 0.030684737115135353\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.32450331125827814,\n \"acc_stderr\": 0.038227469376587525,\n \"\
acc_norm\": 0.32450331125827814,\n \"acc_norm_stderr\": 0.038227469376587525\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.8330275229357799,\n \"acc_stderr\": 0.01599015488507338,\n \"\
acc_norm\": 0.8330275229357799,\n \"acc_norm_stderr\": 0.01599015488507338\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.5370370370370371,\n \"acc_stderr\": 0.03400603625538272,\n \"\
acc_norm\": 0.5370370370370371,\n \"acc_norm_stderr\": 0.03400603625538272\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.8382352941176471,\n \"acc_stderr\": 0.02584501798692692,\n \"\
acc_norm\": 0.8382352941176471,\n \"acc_norm_stderr\": 0.02584501798692692\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.810126582278481,\n \"acc_stderr\": 0.025530100460233494,\n \
\ \"acc_norm\": 0.810126582278481,\n \"acc_norm_stderr\": 0.025530100460233494\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6816143497757847,\n\
\ \"acc_stderr\": 0.03126580522513713,\n \"acc_norm\": 0.6816143497757847,\n\
\ \"acc_norm_stderr\": 0.03126580522513713\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.8091603053435115,\n \"acc_stderr\": 0.03446513350752599,\n\
\ \"acc_norm\": 0.8091603053435115,\n \"acc_norm_stderr\": 0.03446513350752599\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.7933884297520661,\n \"acc_stderr\": 0.036959801280988226,\n \"\
acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.036959801280988226\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7962962962962963,\n\
\ \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.7962962962962963,\n\
\ \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.033519538795212696,\n\
\ \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.033519538795212696\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5267857142857143,\n\
\ \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.5267857142857143,\n\
\ \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.8058252427184466,\n \"acc_stderr\": 0.03916667762822585,\n\
\ \"acc_norm\": 0.8058252427184466,\n \"acc_norm_stderr\": 0.03916667762822585\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8888888888888888,\n\
\ \"acc_stderr\": 0.020588491316092382,\n \"acc_norm\": 0.8888888888888888,\n\
\ \"acc_norm_stderr\": 0.020588491316092382\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \
\ \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8263090676883781,\n\
\ \"acc_stderr\": 0.013547415658662255,\n \"acc_norm\": 0.8263090676883781,\n\
\ \"acc_norm_stderr\": 0.013547415658662255\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.7427745664739884,\n \"acc_stderr\": 0.023532925431044283,\n\
\ \"acc_norm\": 0.7427745664739884,\n \"acc_norm_stderr\": 0.023532925431044283\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.36983240223463687,\n\
\ \"acc_stderr\": 0.016145881256056215,\n \"acc_norm\": 0.36983240223463687,\n\
\ \"acc_norm_stderr\": 0.016145881256056215\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.7189542483660131,\n \"acc_stderr\": 0.02573885479781874,\n\
\ \"acc_norm\": 0.7189542483660131,\n \"acc_norm_stderr\": 0.02573885479781874\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7138263665594855,\n\
\ \"acc_stderr\": 0.025670259242188933,\n \"acc_norm\": 0.7138263665594855,\n\
\ \"acc_norm_stderr\": 0.025670259242188933\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.7345679012345679,\n \"acc_stderr\": 0.024569223600460845,\n\
\ \"acc_norm\": 0.7345679012345679,\n \"acc_norm_stderr\": 0.024569223600460845\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.4716312056737589,\n \"acc_stderr\": 0.029779450957303062,\n \
\ \"acc_norm\": 0.4716312056737589,\n \"acc_norm_stderr\": 0.029779450957303062\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4517601043024772,\n\
\ \"acc_stderr\": 0.012710662233660247,\n \"acc_norm\": 0.4517601043024772,\n\
\ \"acc_norm_stderr\": 0.012710662233660247\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.6654411764705882,\n \"acc_stderr\": 0.028661996202335303,\n\
\ \"acc_norm\": 0.6654411764705882,\n \"acc_norm_stderr\": 0.028661996202335303\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6470588235294118,\n \"acc_stderr\": 0.01933314202079716,\n \
\ \"acc_norm\": 0.6470588235294118,\n \"acc_norm_stderr\": 0.01933314202079716\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n\
\ \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n\
\ \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.028920583220675596,\n\
\ \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.028920583220675596\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8308457711442786,\n\
\ \"acc_stderr\": 0.026508590656233257,\n \"acc_norm\": 0.8308457711442786,\n\
\ \"acc_norm_stderr\": 0.026508590656233257\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \
\ \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n\
\ \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5120481927710844,\n\
\ \"acc_stderr\": 0.03891364495835816,\n \"acc_norm\": 0.5120481927710844,\n\
\ \"acc_norm_stderr\": 0.03891364495835816\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727665,\n\
\ \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727665\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4186046511627907,\n\
\ \"mc1_stderr\": 0.017270015284476855,\n \"mc2\": 0.5818075900070901,\n\
\ \"mc2_stderr\": 0.015361574502931194\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.8318863456985004,\n \"acc_stderr\": 0.010510336954166737\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6770280515542078,\n \
\ \"acc_stderr\": 0.012880360794851815\n }\n}\n```"
repo_url: https://huggingface.co/gmonsoon/MiaAffogato-Indo-Mistral-7b
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|arc:challenge|25_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|gsm8k|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hellaswag|10_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-02-04T17-34-06.201391.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-management|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-virology|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|truthfulqa:mc|0_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-02-04T17-34-06.201391.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- '**/details_harness|winogrande|5_2024-02-04T17-34-06.201391.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-02-04T17-34-06.201391.parquet'
- config_name: results
data_files:
- split: 2024_02_04T17_34_06.201391
path:
- results_2024-02-04T17-34-06.201391.parquet
- split: latest
path:
- results_2024-02-04T17-34-06.201391.parquet
---
# Dataset Card for Evaluation run of gmonsoon/MiaAffogato-Indo-Mistral-7b
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [gmonsoon/MiaAffogato-Indo-Mistral-7b](https://huggingface.co/gmonsoon/MiaAffogato-Indo-Mistral-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_gmonsoon__MiaAffogato-Indo-Mistral-7b",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2024-02-04T17:34:06.201391](https://huggingface.co/datasets/open-llm-leaderboard/details_gmonsoon__MiaAffogato-Indo-Mistral-7b/blob/main/results_2024-02-04T17-34-06.201391.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each of them in the results and in the "latest" split for each eval):
```python
{
"all": {
"acc": 0.644912685809928,
"acc_stderr": 0.03209671177294616,
"acc_norm": 0.6450526633312276,
"acc_norm_stderr": 0.032762897968506996,
"mc1": 0.4186046511627907,
"mc1_stderr": 0.017270015284476855,
"mc2": 0.5818075900070901,
"mc2_stderr": 0.015361574502931194
},
"harness|arc:challenge|25": {
"acc": 0.6322525597269625,
"acc_stderr": 0.014090995618168477,
"acc_norm": 0.6638225255972696,
"acc_norm_stderr": 0.013804855026205761
},
"harness|hellaswag|10": {
"acc": 0.6585341565425215,
"acc_stderr": 0.004732322172153749,
"acc_norm": 0.8543118900617407,
"acc_norm_stderr": 0.0035207225053320934
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6370370370370371,
"acc_stderr": 0.041539484047423976,
"acc_norm": 0.6370370370370371,
"acc_norm_stderr": 0.041539484047423976
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.7171052631578947,
"acc_stderr": 0.03665349695640767,
"acc_norm": 0.7171052631578947,
"acc_norm_stderr": 0.03665349695640767
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001975
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.720754716981132,
"acc_stderr": 0.027611163402399715,
"acc_norm": 0.720754716981132,
"acc_norm_stderr": 0.027611163402399715
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7222222222222222,
"acc_stderr": 0.037455547914624555,
"acc_norm": 0.7222222222222222,
"acc_norm_stderr": 0.037455547914624555
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.47,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.47,
"acc_norm_stderr": 0.050161355804659205
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.29,
"acc_stderr": 0.04560480215720683,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720683
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.630057803468208,
"acc_stderr": 0.0368122963339432,
"acc_norm": 0.630057803468208,
"acc_norm_stderr": 0.0368122963339432
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.4117647058823529,
"acc_stderr": 0.048971049527263666,
"acc_norm": 0.4117647058823529,
"acc_norm_stderr": 0.048971049527263666
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.77,
"acc_stderr": 0.042295258468165065,
"acc_norm": 0.77,
"acc_norm_stderr": 0.042295258468165065
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.574468085106383,
"acc_stderr": 0.03232146916224469,
"acc_norm": 0.574468085106383,
"acc_norm_stderr": 0.03232146916224469
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.4649122807017544,
"acc_stderr": 0.046920083813689104,
"acc_norm": 0.4649122807017544,
"acc_norm_stderr": 0.046920083813689104
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5655172413793104,
"acc_stderr": 0.04130740879555498,
"acc_norm": 0.5655172413793104,
"acc_norm_stderr": 0.04130740879555498
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.4021164021164021,
"acc_stderr": 0.025253032554997692,
"acc_norm": 0.4021164021164021,
"acc_norm_stderr": 0.025253032554997692
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.49206349206349204,
"acc_stderr": 0.044715725362943486,
"acc_norm": 0.49206349206349204,
"acc_norm_stderr": 0.044715725362943486
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7677419354838709,
"acc_stderr": 0.024022256130308235,
"acc_norm": 0.7677419354838709,
"acc_norm_stderr": 0.024022256130308235
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.4975369458128079,
"acc_stderr": 0.03517945038691063,
"acc_norm": 0.4975369458128079,
"acc_norm_stderr": 0.03517945038691063
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.69,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.69,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7757575757575758,
"acc_stderr": 0.032568666616811015,
"acc_norm": 0.7757575757575758,
"acc_norm_stderr": 0.032568666616811015
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.8080808080808081,
"acc_stderr": 0.02805779167298902,
"acc_norm": 0.8080808080808081,
"acc_norm_stderr": 0.02805779167298902
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8808290155440415,
"acc_stderr": 0.023381935348121437,
"acc_norm": 0.8808290155440415,
"acc_norm_stderr": 0.023381935348121437
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6666666666666666,
"acc_stderr": 0.023901157979402534,
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.023901157979402534
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.3074074074074074,
"acc_stderr": 0.028133252578815632,
"acc_norm": 0.3074074074074074,
"acc_norm_stderr": 0.028133252578815632
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6638655462184874,
"acc_stderr": 0.030684737115135353,
"acc_norm": 0.6638655462184874,
"acc_norm_stderr": 0.030684737115135353
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.32450331125827814,
"acc_stderr": 0.038227469376587525,
"acc_norm": 0.32450331125827814,
"acc_norm_stderr": 0.038227469376587525
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8330275229357799,
"acc_stderr": 0.01599015488507338,
"acc_norm": 0.8330275229357799,
"acc_norm_stderr": 0.01599015488507338
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5370370370370371,
"acc_stderr": 0.03400603625538272,
"acc_norm": 0.5370370370370371,
"acc_norm_stderr": 0.03400603625538272
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8382352941176471,
"acc_stderr": 0.02584501798692692,
"acc_norm": 0.8382352941176471,
"acc_norm_stderr": 0.02584501798692692
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.810126582278481,
"acc_stderr": 0.025530100460233494,
"acc_norm": 0.810126582278481,
"acc_norm_stderr": 0.025530100460233494
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6816143497757847,
"acc_stderr": 0.03126580522513713,
"acc_norm": 0.6816143497757847,
"acc_norm_stderr": 0.03126580522513713
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.8091603053435115,
"acc_stderr": 0.03446513350752599,
"acc_norm": 0.8091603053435115,
"acc_norm_stderr": 0.03446513350752599
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7933884297520661,
"acc_stderr": 0.036959801280988226,
"acc_norm": 0.7933884297520661,
"acc_norm_stderr": 0.036959801280988226
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7962962962962963,
"acc_stderr": 0.03893542518824847,
"acc_norm": 0.7962962962962963,
"acc_norm_stderr": 0.03893542518824847
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7607361963190185,
"acc_stderr": 0.033519538795212696,
"acc_norm": 0.7607361963190185,
"acc_norm_stderr": 0.033519538795212696
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.5267857142857143,
"acc_stderr": 0.047389751192741546,
"acc_norm": 0.5267857142857143,
"acc_norm_stderr": 0.047389751192741546
},
"harness|hendrycksTest-management|5": {
"acc": 0.8058252427184466,
"acc_stderr": 0.03916667762822585,
"acc_norm": 0.8058252427184466,
"acc_norm_stderr": 0.03916667762822585
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8888888888888888,
"acc_stderr": 0.020588491316092382,
"acc_norm": 0.8888888888888888,
"acc_norm_stderr": 0.020588491316092382
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.7,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.7,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8263090676883781,
"acc_stderr": 0.013547415658662255,
"acc_norm": 0.8263090676883781,
"acc_norm_stderr": 0.013547415658662255
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7427745664739884,
"acc_stderr": 0.023532925431044283,
"acc_norm": 0.7427745664739884,
"acc_norm_stderr": 0.023532925431044283
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.36983240223463687,
"acc_stderr": 0.016145881256056215,
"acc_norm": 0.36983240223463687,
"acc_norm_stderr": 0.016145881256056215
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7189542483660131,
"acc_stderr": 0.02573885479781874,
"acc_norm": 0.7189542483660131,
"acc_norm_stderr": 0.02573885479781874
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7138263665594855,
"acc_stderr": 0.025670259242188933,
"acc_norm": 0.7138263665594855,
"acc_norm_stderr": 0.025670259242188933
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7345679012345679,
"acc_stderr": 0.024569223600460845,
"acc_norm": 0.7345679012345679,
"acc_norm_stderr": 0.024569223600460845
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4716312056737589,
"acc_stderr": 0.029779450957303062,
"acc_norm": 0.4716312056737589,
"acc_norm_stderr": 0.029779450957303062
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4517601043024772,
"acc_stderr": 0.012710662233660247,
"acc_norm": 0.4517601043024772,
"acc_norm_stderr": 0.012710662233660247
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6654411764705882,
"acc_stderr": 0.028661996202335303,
"acc_norm": 0.6654411764705882,
"acc_norm_stderr": 0.028661996202335303
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6470588235294118,
"acc_stderr": 0.01933314202079716,
"acc_norm": 0.6470588235294118,
"acc_norm_stderr": 0.01933314202079716
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6545454545454545,
"acc_stderr": 0.04554619617541054,
"acc_norm": 0.6545454545454545,
"acc_norm_stderr": 0.04554619617541054
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7142857142857143,
"acc_stderr": 0.028920583220675596,
"acc_norm": 0.7142857142857143,
"acc_norm_stderr": 0.028920583220675596
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8308457711442786,
"acc_stderr": 0.026508590656233257,
"acc_norm": 0.8308457711442786,
"acc_norm_stderr": 0.026508590656233257
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.86,
"acc_stderr": 0.0348735088019777,
"acc_norm": 0.86,
"acc_norm_stderr": 0.0348735088019777
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5120481927710844,
"acc_stderr": 0.03891364495835816,
"acc_norm": 0.5120481927710844,
"acc_norm_stderr": 0.03891364495835816
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8245614035087719,
"acc_stderr": 0.029170885500727665,
"acc_norm": 0.8245614035087719,
"acc_norm_stderr": 0.029170885500727665
},
"harness|truthfulqa:mc|0": {
"mc1": 0.4186046511627907,
"mc1_stderr": 0.017270015284476855,
"mc2": 0.5818075900070901,
"mc2_stderr": 0.015361574502931194
},
"harness|winogrande|5": {
"acc": 0.8318863456985004,
"acc_stderr": 0.010510336954166737
},
"harness|gsm8k|5": {
"acc": 0.6770280515542078,
"acc_stderr": 0.012880360794851815
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
felipedinely/tadeu | felipedinely | "2024-02-04T18:36:58Z" | 33 | 0 | [
"license:openrail",
"size_categories:n<1K",
"format:audiofolder",
"modality:audio",
"library:datasets",
"library:mlcroissant",
"region:us"
] | null | "2024-02-04T18:14:59Z" | ---
license: openrail
---
|
nguyenphuthien/ViOpenHermes-2.5 | nguyenphuthien | "2024-05-23T03:38:58Z" | 33 | 1 | [
"task_categories:text-generation",
"language:vi",
"license:mit",
"size_categories:1M<n<10M",
"format:json",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | [
"text-generation"
] | "2024-02-04T18:18:51Z" | ---
license: mit
task_categories:
- text-generation
language:
- vi
size_categories:
- 1M<n<10M
--- |
myrtotsok/clf | myrtotsok | "2024-02-06T08:38:02Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-04T18:22:03Z" | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
dataset_info:
features:
- name: request
dtype: string
- name: label
dtype: int64
splits:
- name: train
num_bytes: 96731
num_examples: 1120
- name: validation
num_bytes: 24176
num_examples: 280
download_size: 27784
dataset_size: 120907
---
## id2label = {
## 0: 'binary visual question answering',
## 1:'search by image',
## 2:'image search by text',
## 3:'geospatial question answering',
## 4:'count objects in image' ,
## 5:'object extraction in image',
## 6:'image segmentation'
## }
---
size_categories:
- 1K<n<10K
task_categories:
- text-classification
dataset_info:
features:
- name: request
dtype: string
- name: label
dtype: int64
splits:
- name: train
num_bytes: 99611
num_examples: 1120
- name: validation
num_bytes: 24896
num_examples: 280
download_size: 27907
dataset_size: 124507
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
---
|
hojzas/proj4-uniq_orig_order-lab1 | hojzas | "2024-02-04T19:06:13Z" | 33 | 0 | [
"license:apache-2.0",
"size_categories:n<1K",
"format:csv",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-04T18:56:28Z" | ---
license: apache-2.0
---
|
dhuynh95/Magicoder-Evol-Instruct-10000-CodeLlama-70b-tokenized-0.5-v2 | dhuynh95 | "2024-02-04T19:29:54Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-04T19:29:53Z" | ---
dataset_info:
features:
- name: instruction
dtype: string
- name: output
dtype: string
splits:
- name: train
num_bytes: 22972759
num_examples: 10000
download_size: 11249692
dataset_size: 22972759
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
zicsx/Wikipedia-Hindi | zicsx | "2024-02-04T23:49:20Z" | 33 | 1 | [
"size_categories:100K<n<1M",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-04T23:41:14Z" | ---
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 576464909.5937778
num_examples: 154867
download_size: 216951489
dataset_size: 576464909.5937778
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "Wikipedia-Hindi"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Maxsinan/minhavoz | Maxsinan | "2024-02-04T23:52:30Z" | 33 | 0 | [
"license:openrail",
"size_categories:n<1K",
"format:audiofolder",
"modality:audio",
"library:datasets",
"library:mlcroissant",
"region:us"
] | null | "2024-02-04T23:48:48Z" | ---
license: openrail
---
|
Edopangui/promo_parquet | Edopangui | "2024-02-04T23:50:33Z" | 33 | 0 | [
"license:apache-2.0",
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-04T23:49:54Z" | ---
license: apache-2.0
---
|
steven2521/squad_v2_rag_doc | steven2521 | "2024-02-04T23:53:31Z" | 33 | 0 | [
"license:mit",
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-04T23:50:14Z" | ---
license: mit
dataset_info:
features:
- name: document
dtype: string
- name: embedding
sequence: float32
splits:
- name: train
num_bytes: 131093312
num_examples: 19029
- name: validation
num_bytes: 8375044
num_examples: 1204
download_size: 145719965
dataset_size: 139468356
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
---
|
Mitsuki-Sakamoto/alpaca_farm-reward-model-deberta-v3-large-v2-re-eval-preference | Mitsuki-Sakamoto | "2024-02-05T04:29:42Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T04:29:38Z" | ---
dataset_info:
config_name: alpaca_instructions-pythia-1.4b_alpaca_farm_instructions_sft_constant_pa-checkpoint-7500
features:
- name: instruction
dtype: string
- name: input
dtype: string
- name: output
dtype: string
- name: preference
dtype: int64
- name: output_1
dtype: string
- name: output_2
dtype: string
- name: reward_model_prompt_format
dtype: string
- name: gen_prompt_format
dtype: string
- name: gen_kwargs
struct:
- name: do_sample
dtype: bool
- name: max_new_tokens
dtype: int64
- name: pad_token_id
dtype: int64
- name: top_k
dtype: int64
- name: top_p
dtype: float64
- name: reward_1
dtype: float64
- name: reward_2
dtype: float64
splits:
- name: val
num_bytes: 2575959
num_examples: 2000
download_size: 1232867
dataset_size: 2575959
configs:
- config_name: alpaca_instructions-pythia-1.4b_alpaca_farm_instructions_sft_constant_pa-checkpoint-7500
data_files:
- split: val
path: alpaca_instructions-pythia-1.4b_alpaca_farm_instructions_sft_constant_pa-checkpoint-7500/val-*
---
# Dataset Card for "alpaca_farm-reward-model-deberta-v3-large-v2-re-eval-preference"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
jetaudio/name_hantu | jetaudio | "2024-02-05T05:55:53Z" | 33 | 0 | [
"size_categories:1M<n<10M",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T05:55:03Z" | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
dataset_info:
features:
- name: scr
dtype: string
- name: trg
dtype: string
splits:
- name: train
num_bytes: 28583889.0
num_examples: 1021281
download_size: 16288768
dataset_size: 28583889.0
---
# Dataset Card for "name_hantu"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
kenilshah35/dictation-test | kenilshah35 | "2024-02-05T06:55:08Z" | 33 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T06:54:19Z" | ---
dataset_info:
features:
- name: audio
dtype: audio
- name: text
dtype: string
splits:
- name: train
num_bytes: 8894736.0
num_examples: 19
download_size: 4493848
dataset_size: 8894736.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
open-llm-leaderboard-old/details_dddsaty__SOLAR_Merge_Adapter_DPO_Orca | open-llm-leaderboard-old | "2024-02-05T08:51:01Z" | 33 | 0 | [
"region:us"
] | null | "2024-02-05T08:50:36Z" | ---
pretty_name: Evaluation run of dddsaty/SOLAR_Merge_Adapter_DPO_Orca
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [dddsaty/SOLAR_Merge_Adapter_DPO_Orca](https://huggingface.co/dddsaty/SOLAR_Merge_Adapter_DPO_Orca)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_dddsaty__SOLAR_Merge_Adapter_DPO_Orca\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2024-02-05T08:48:15.938281](https://huggingface.co/datasets/open-llm-leaderboard/details_dddsaty__SOLAR_Merge_Adapter_DPO_Orca/blob/main/results_2024-02-05T08-48-15.938281.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6327439375185642,\n\
\ \"acc_stderr\": 0.032443250608216324,\n \"acc_norm\": 0.6355200172658205,\n\
\ \"acc_norm_stderr\": 0.03310341022715256,\n \"mc1\": 0.36107711138310894,\n\
\ \"mc1_stderr\": 0.016814312844836882,\n \"mc2\": 0.51488245253393,\n\
\ \"mc2_stderr\": 0.015188854393420268\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.6109215017064846,\n \"acc_stderr\": 0.014247309976045607,\n\
\ \"acc_norm\": 0.6390784982935154,\n \"acc_norm_stderr\": 0.014034761386175452\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6499701254730134,\n\
\ \"acc_stderr\": 0.004760041843651493,\n \"acc_norm\": 0.8458474407488548,\n\
\ \"acc_norm_stderr\": 0.0036035695286784114\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \
\ \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.562962962962963,\n\
\ \"acc_stderr\": 0.04284958639753401,\n \"acc_norm\": 0.562962962962963,\n\
\ \"acc_norm_stderr\": 0.04284958639753401\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n\
\ \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.65,\n\
\ \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \
\ \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.6641509433962264,\n \"acc_stderr\": 0.02906722014664483,\n\
\ \"acc_norm\": 0.6641509433962264,\n \"acc_norm_stderr\": 0.02906722014664483\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7222222222222222,\n\
\ \"acc_stderr\": 0.03745554791462456,\n \"acc_norm\": 0.7222222222222222,\n\
\ \"acc_norm_stderr\": 0.03745554791462456\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.42,\n \"acc_stderr\": 0.04960449637488584,\n \
\ \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.04960449637488584\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n\
\ \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \
\ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6184971098265896,\n\
\ \"acc_stderr\": 0.03703851193099522,\n \"acc_norm\": 0.6184971098265896,\n\
\ \"acc_norm_stderr\": 0.03703851193099522\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.04810840148082635,\n\
\ \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.04810840148082635\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n\
\ \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.5531914893617021,\n \"acc_stderr\": 0.0325005368436584,\n\
\ \"acc_norm\": 0.5531914893617021,\n \"acc_norm_stderr\": 0.0325005368436584\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n\
\ \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n\
\ \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04082482904638628,\n \
\ \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04082482904638628\n },\n\
\ \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41005291005291006,\n\
\ \"acc_stderr\": 0.025331202438944423,\n \"acc_norm\": 0.41005291005291006,\n\
\ \"acc_norm_stderr\": 0.025331202438944423\n },\n \"harness|hendrycksTest-formal_logic|5\"\
: {\n \"acc\": 0.3888888888888889,\n \"acc_stderr\": 0.04360314860077459,\n\
\ \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.04360314860077459\n\
\ },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n\
\ \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \
\ \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-high_school_biology|5\"\
: {\n \"acc\": 0.7322580645161291,\n \"acc_stderr\": 0.02518900666021238,\n\
\ \"acc_norm\": 0.7322580645161291,\n \"acc_norm_stderr\": 0.02518900666021238\n\
\ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\
: 0.43349753694581283,\n \"acc_stderr\": 0.03486731727419872,\n \"\
acc_norm\": 0.43349753694581283,\n \"acc_norm_stderr\": 0.03486731727419872\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939098,\n \"acc_norm\"\
: 0.63,\n \"acc_norm_stderr\": 0.04852365870939098\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.03256866661681102,\n\
\ \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.03256866661681102\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.8282828282828283,\n \"acc_stderr\": 0.0268697161874299,\n \"acc_norm\"\
: 0.8282828282828283,\n \"acc_norm_stderr\": 0.0268697161874299\n },\n\
\ \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \
\ \"acc\": 0.8756476683937824,\n \"acc_stderr\": 0.023814477086593566,\n\
\ \"acc_norm\": 0.8756476683937824,\n \"acc_norm_stderr\": 0.023814477086593566\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.6076923076923076,\n \"acc_stderr\": 0.02475600038213095,\n \
\ \"acc_norm\": 0.6076923076923076,\n \"acc_norm_stderr\": 0.02475600038213095\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.3888888888888889,\n \"acc_stderr\": 0.029723278961476664,\n \
\ \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.029723278961476664\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.6638655462184874,\n \"acc_stderr\": 0.030684737115135353,\n\
\ \"acc_norm\": 0.6638655462184874,\n \"acc_norm_stderr\": 0.030684737115135353\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.39072847682119205,\n \"acc_stderr\": 0.03983798306659809,\n \"\
acc_norm\": 0.39072847682119205,\n \"acc_norm_stderr\": 0.03983798306659809\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.8165137614678899,\n \"acc_stderr\": 0.016595259710399293,\n \"\
acc_norm\": 0.8165137614678899,\n \"acc_norm_stderr\": 0.016595259710399293\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.5648148148148148,\n \"acc_stderr\": 0.033812000056435254,\n \"\
acc_norm\": 0.5648148148148148,\n \"acc_norm_stderr\": 0.033812000056435254\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.8725490196078431,\n \"acc_stderr\": 0.02340553048084631,\n \"\
acc_norm\": 0.8725490196078431,\n \"acc_norm_stderr\": 0.02340553048084631\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.810126582278481,\n \"acc_stderr\": 0.02553010046023349,\n \
\ \"acc_norm\": 0.810126582278481,\n \"acc_norm_stderr\": 0.02553010046023349\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7085201793721974,\n\
\ \"acc_stderr\": 0.030500283176545847,\n \"acc_norm\": 0.7085201793721974,\n\
\ \"acc_norm_stderr\": 0.030500283176545847\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.7175572519083969,\n \"acc_stderr\": 0.03948406125768361,\n\
\ \"acc_norm\": 0.7175572519083969,\n \"acc_norm_stderr\": 0.03948406125768361\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228733,\n \"\
acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228733\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.75,\n\
\ \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.75,\n \
\ \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.7423312883435583,\n \"acc_stderr\": 0.03436150827846917,\n\
\ \"acc_norm\": 0.7423312883435583,\n \"acc_norm_stderr\": 0.03436150827846917\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n\
\ \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n\
\ \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.7961165048543689,\n \"acc_stderr\": 0.039891398595317706,\n\
\ \"acc_norm\": 0.7961165048543689,\n \"acc_norm_stderr\": 0.039891398595317706\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8760683760683761,\n\
\ \"acc_stderr\": 0.021586494001281372,\n \"acc_norm\": 0.8760683760683761,\n\
\ \"acc_norm_stderr\": 0.021586494001281372\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \
\ \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8199233716475096,\n\
\ \"acc_stderr\": 0.013740797258579828,\n \"acc_norm\": 0.8199233716475096,\n\
\ \"acc_norm_stderr\": 0.013740797258579828\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.684971098265896,\n \"acc_stderr\": 0.025009313790069716,\n\
\ \"acc_norm\": 0.684971098265896,\n \"acc_norm_stderr\": 0.025009313790069716\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.35195530726256985,\n\
\ \"acc_stderr\": 0.01597266852368907,\n \"acc_norm\": 0.35195530726256985,\n\
\ \"acc_norm_stderr\": 0.01597266852368907\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.7124183006535948,\n \"acc_stderr\": 0.025917806117147158,\n\
\ \"acc_norm\": 0.7124183006535948,\n \"acc_norm_stderr\": 0.025917806117147158\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7106109324758842,\n\
\ \"acc_stderr\": 0.025755865922632945,\n \"acc_norm\": 0.7106109324758842,\n\
\ \"acc_norm_stderr\": 0.025755865922632945\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.7345679012345679,\n \"acc_stderr\": 0.02456922360046085,\n\
\ \"acc_norm\": 0.7345679012345679,\n \"acc_norm_stderr\": 0.02456922360046085\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.475177304964539,\n \"acc_stderr\": 0.029790719243829714,\n \
\ \"acc_norm\": 0.475177304964539,\n \"acc_norm_stderr\": 0.029790719243829714\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4680573663624511,\n\
\ \"acc_stderr\": 0.012744149704869647,\n \"acc_norm\": 0.4680573663624511,\n\
\ \"acc_norm_stderr\": 0.012744149704869647\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.6323529411764706,\n \"acc_stderr\": 0.02928941340940319,\n\
\ \"acc_norm\": 0.6323529411764706,\n \"acc_norm_stderr\": 0.02928941340940319\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6437908496732027,\n \"acc_stderr\": 0.0193733324207245,\n \
\ \"acc_norm\": 0.6437908496732027,\n \"acc_norm_stderr\": 0.0193733324207245\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n\
\ \"acc_stderr\": 0.044612721759105085,\n \"acc_norm\": 0.6818181818181818,\n\
\ \"acc_norm_stderr\": 0.044612721759105085\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.7428571428571429,\n \"acc_stderr\": 0.02797982353874455,\n\
\ \"acc_norm\": 0.7428571428571429,\n \"acc_norm_stderr\": 0.02797982353874455\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n\
\ \"acc_stderr\": 0.026193923544454115,\n \"acc_norm\": 0.835820895522388,\n\
\ \"acc_norm_stderr\": 0.026193923544454115\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.86,\n \"acc_stderr\": 0.03487350880197769,\n \
\ \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.03487350880197769\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n\
\ \"acc_stderr\": 0.038695433234721015,\n \"acc_norm\": 0.5542168674698795,\n\
\ \"acc_norm_stderr\": 0.038695433234721015\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.7953216374269005,\n \"acc_stderr\": 0.03094445977853321,\n\
\ \"acc_norm\": 0.7953216374269005,\n \"acc_norm_stderr\": 0.03094445977853321\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.36107711138310894,\n\
\ \"mc1_stderr\": 0.016814312844836882,\n \"mc2\": 0.51488245253393,\n\
\ \"mc2_stderr\": 0.015188854393420268\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.8200473559589582,\n \"acc_stderr\": 0.01079646868806868\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5056861258529188,\n \
\ \"acc_stderr\": 0.013771594106283036\n }\n}\n```"
repo_url: https://huggingface.co/dddsaty/SOLAR_Merge_Adapter_DPO_Orca
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|arc:challenge|25_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|gsm8k|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hellaswag|10_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-02-05T08-48-15.938281.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-management|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-virology|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|truthfulqa:mc|0_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-02-05T08-48-15.938281.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- '**/details_harness|winogrande|5_2024-02-05T08-48-15.938281.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-02-05T08-48-15.938281.parquet'
- config_name: results
data_files:
- split: 2024_02_05T08_48_15.938281
path:
- results_2024-02-05T08-48-15.938281.parquet
- split: latest
path:
- results_2024-02-05T08-48-15.938281.parquet
---
# Dataset Card for Evaluation run of dddsaty/SOLAR_Merge_Adapter_DPO_Orca
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [dddsaty/SOLAR_Merge_Adapter_DPO_Orca](https://huggingface.co/dddsaty/SOLAR_Merge_Adapter_DPO_Orca) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_dddsaty__SOLAR_Merge_Adapter_DPO_Orca",
"harness_winogrande_5",
	split="latest")
```
## Latest results
These are the [latest results from run 2024-02-05T08:48:15.938281](https://huggingface.co/datasets/open-llm-leaderboard/details_dddsaty__SOLAR_Merge_Adapter_DPO_Orca/blob/main/results_2024-02-05T08-48-15.938281.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6327439375185642,
"acc_stderr": 0.032443250608216324,
"acc_norm": 0.6355200172658205,
"acc_norm_stderr": 0.03310341022715256,
"mc1": 0.36107711138310894,
"mc1_stderr": 0.016814312844836882,
"mc2": 0.51488245253393,
"mc2_stderr": 0.015188854393420268
},
"harness|arc:challenge|25": {
"acc": 0.6109215017064846,
"acc_stderr": 0.014247309976045607,
"acc_norm": 0.6390784982935154,
"acc_norm_stderr": 0.014034761386175452
},
"harness|hellaswag|10": {
"acc": 0.6499701254730134,
"acc_stderr": 0.004760041843651493,
"acc_norm": 0.8458474407488548,
"acc_norm_stderr": 0.0036035695286784114
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.562962962962963,
"acc_stderr": 0.04284958639753401,
"acc_norm": 0.562962962962963,
"acc_norm_stderr": 0.04284958639753401
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6907894736842105,
"acc_stderr": 0.037610708698674805,
"acc_norm": 0.6907894736842105,
"acc_norm_stderr": 0.037610708698674805
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.65,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.65,
"acc_norm_stderr": 0.0479372485441102
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6641509433962264,
"acc_stderr": 0.02906722014664483,
"acc_norm": 0.6641509433962264,
"acc_norm_stderr": 0.02906722014664483
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7222222222222222,
"acc_stderr": 0.03745554791462456,
"acc_norm": 0.7222222222222222,
"acc_norm_stderr": 0.03745554791462456
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.42,
"acc_stderr": 0.04960449637488584,
"acc_norm": 0.42,
"acc_norm_stderr": 0.04960449637488584
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.53,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.53,
"acc_norm_stderr": 0.050161355804659205
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6184971098265896,
"acc_stderr": 0.03703851193099522,
"acc_norm": 0.6184971098265896,
"acc_norm_stderr": 0.03703851193099522
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.37254901960784315,
"acc_stderr": 0.04810840148082635,
"acc_norm": 0.37254901960784315,
"acc_norm_stderr": 0.04810840148082635
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.74,
"acc_stderr": 0.04408440022768078,
"acc_norm": 0.74,
"acc_norm_stderr": 0.04408440022768078
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5531914893617021,
"acc_stderr": 0.0325005368436584,
"acc_norm": 0.5531914893617021,
"acc_norm_stderr": 0.0325005368436584
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.49122807017543857,
"acc_stderr": 0.04702880432049615,
"acc_norm": 0.49122807017543857,
"acc_norm_stderr": 0.04702880432049615
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.6,
"acc_stderr": 0.04082482904638628,
"acc_norm": 0.6,
"acc_norm_stderr": 0.04082482904638628
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.41005291005291006,
"acc_stderr": 0.025331202438944423,
"acc_norm": 0.41005291005291006,
"acc_norm_stderr": 0.025331202438944423
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.3888888888888889,
"acc_stderr": 0.04360314860077459,
"acc_norm": 0.3888888888888889,
"acc_norm_stderr": 0.04360314860077459
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7322580645161291,
"acc_stderr": 0.02518900666021238,
"acc_norm": 0.7322580645161291,
"acc_norm_stderr": 0.02518900666021238
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.43349753694581283,
"acc_stderr": 0.03486731727419872,
"acc_norm": 0.43349753694581283,
"acc_norm_stderr": 0.03486731727419872
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.63,
"acc_stderr": 0.04852365870939098,
"acc_norm": 0.63,
"acc_norm_stderr": 0.04852365870939098
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7757575757575758,
"acc_stderr": 0.03256866661681102,
"acc_norm": 0.7757575757575758,
"acc_norm_stderr": 0.03256866661681102
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.8282828282828283,
"acc_stderr": 0.0268697161874299,
"acc_norm": 0.8282828282828283,
"acc_norm_stderr": 0.0268697161874299
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8756476683937824,
"acc_stderr": 0.023814477086593566,
"acc_norm": 0.8756476683937824,
"acc_norm_stderr": 0.023814477086593566
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6076923076923076,
"acc_stderr": 0.02475600038213095,
"acc_norm": 0.6076923076923076,
"acc_norm_stderr": 0.02475600038213095
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.3888888888888889,
"acc_stderr": 0.029723278961476664,
"acc_norm": 0.3888888888888889,
"acc_norm_stderr": 0.029723278961476664
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6638655462184874,
"acc_stderr": 0.030684737115135353,
"acc_norm": 0.6638655462184874,
"acc_norm_stderr": 0.030684737115135353
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.39072847682119205,
"acc_stderr": 0.03983798306659809,
"acc_norm": 0.39072847682119205,
"acc_norm_stderr": 0.03983798306659809
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8165137614678899,
"acc_stderr": 0.016595259710399293,
"acc_norm": 0.8165137614678899,
"acc_norm_stderr": 0.016595259710399293
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5648148148148148,
"acc_stderr": 0.033812000056435254,
"acc_norm": 0.5648148148148148,
"acc_norm_stderr": 0.033812000056435254
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8725490196078431,
"acc_stderr": 0.02340553048084631,
"acc_norm": 0.8725490196078431,
"acc_norm_stderr": 0.02340553048084631
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.810126582278481,
"acc_stderr": 0.02553010046023349,
"acc_norm": 0.810126582278481,
"acc_norm_stderr": 0.02553010046023349
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.7085201793721974,
"acc_stderr": 0.030500283176545847,
"acc_norm": 0.7085201793721974,
"acc_norm_stderr": 0.030500283176545847
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7175572519083969,
"acc_stderr": 0.03948406125768361,
"acc_norm": 0.7175572519083969,
"acc_norm_stderr": 0.03948406125768361
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7768595041322314,
"acc_stderr": 0.03800754475228733,
"acc_norm": 0.7768595041322314,
"acc_norm_stderr": 0.03800754475228733
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.75,
"acc_stderr": 0.04186091791394607,
"acc_norm": 0.75,
"acc_norm_stderr": 0.04186091791394607
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7423312883435583,
"acc_stderr": 0.03436150827846917,
"acc_norm": 0.7423312883435583,
"acc_norm_stderr": 0.03436150827846917
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.4642857142857143,
"acc_stderr": 0.04733667890053756,
"acc_norm": 0.4642857142857143,
"acc_norm_stderr": 0.04733667890053756
},
"harness|hendrycksTest-management|5": {
"acc": 0.7961165048543689,
"acc_stderr": 0.039891398595317706,
"acc_norm": 0.7961165048543689,
"acc_norm_stderr": 0.039891398595317706
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8760683760683761,
"acc_stderr": 0.021586494001281372,
"acc_norm": 0.8760683760683761,
"acc_norm_stderr": 0.021586494001281372
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.69,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.69,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8199233716475096,
"acc_stderr": 0.013740797258579828,
"acc_norm": 0.8199233716475096,
"acc_norm_stderr": 0.013740797258579828
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.684971098265896,
"acc_stderr": 0.025009313790069716,
"acc_norm": 0.684971098265896,
"acc_norm_stderr": 0.025009313790069716
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.35195530726256985,
"acc_stderr": 0.01597266852368907,
"acc_norm": 0.35195530726256985,
"acc_norm_stderr": 0.01597266852368907
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7124183006535948,
"acc_stderr": 0.025917806117147158,
"acc_norm": 0.7124183006535948,
"acc_norm_stderr": 0.025917806117147158
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7106109324758842,
"acc_stderr": 0.025755865922632945,
"acc_norm": 0.7106109324758842,
"acc_norm_stderr": 0.025755865922632945
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7345679012345679,
"acc_stderr": 0.02456922360046085,
"acc_norm": 0.7345679012345679,
"acc_norm_stderr": 0.02456922360046085
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.475177304964539,
"acc_stderr": 0.029790719243829714,
"acc_norm": 0.475177304964539,
"acc_norm_stderr": 0.029790719243829714
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4680573663624511,
"acc_stderr": 0.012744149704869647,
"acc_norm": 0.4680573663624511,
"acc_norm_stderr": 0.012744149704869647
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6323529411764706,
"acc_stderr": 0.02928941340940319,
"acc_norm": 0.6323529411764706,
"acc_norm_stderr": 0.02928941340940319
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6437908496732027,
"acc_stderr": 0.0193733324207245,
"acc_norm": 0.6437908496732027,
"acc_norm_stderr": 0.0193733324207245
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6818181818181818,
"acc_stderr": 0.044612721759105085,
"acc_norm": 0.6818181818181818,
"acc_norm_stderr": 0.044612721759105085
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7428571428571429,
"acc_stderr": 0.02797982353874455,
"acc_norm": 0.7428571428571429,
"acc_norm_stderr": 0.02797982353874455
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.835820895522388,
"acc_stderr": 0.026193923544454115,
"acc_norm": 0.835820895522388,
"acc_norm_stderr": 0.026193923544454115
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.86,
"acc_stderr": 0.03487350880197769,
"acc_norm": 0.86,
"acc_norm_stderr": 0.03487350880197769
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5542168674698795,
"acc_stderr": 0.038695433234721015,
"acc_norm": 0.5542168674698795,
"acc_norm_stderr": 0.038695433234721015
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.7953216374269005,
"acc_stderr": 0.03094445977853321,
"acc_norm": 0.7953216374269005,
"acc_norm_stderr": 0.03094445977853321
},
"harness|truthfulqa:mc|0": {
"mc1": 0.36107711138310894,
"mc1_stderr": 0.016814312844836882,
"mc2": 0.51488245253393,
"mc2_stderr": 0.015188854393420268
},
"harness|winogrande|5": {
"acc": 0.8200473559589582,
"acc_stderr": 0.01079646868806868
},
"harness|gsm8k|5": {
"acc": 0.5056861258529188,
"acc_stderr": 0.013771594106283036
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
plaguss/test-dataset | plaguss | "2024-02-05T09:27:12Z" | 33 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T09:21:00Z" | ---
dataset_info:
features:
- name: input
dtype: string
- name: generation_model
sequence: string
- name: generation_prompt
list:
list:
- name: content
dtype: string
- name: role
dtype: string
- name: raw_generation_responses
sequence: string
- name: generations
sequence: string
splits:
- name: train
num_bytes: 4270
num_examples: 2
download_size: 16358
dataset_size: 4270
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Atipico1/NQ-colbert | Atipico1 | "2024-02-05T09:55:32Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T09:55:13Z" | ---
dataset_info:
features:
- name: question
dtype: string
- name: answers
sequence: string
- name: ctxs
list:
- name: hasanswer
dtype: bool
- name: score
dtype: float64
- name: text
dtype: string
- name: title
dtype: string
splits:
- name: train
num_bytes: 290367640
num_examples: 87925
- name: test
num_bytes: 12000594
num_examples: 3610
download_size: 176894459
dataset_size: 302368234
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
|
chathuranga-jayanath/selfapr-manipulation-bug-context-10000 | chathuranga-jayanath | "2024-02-05T11:26:58Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T10:43:31Z" | ---
dataset_info:
features:
- name: fix
dtype: string
- name: ctx
dtype: string
splits:
- name: train
num_bytes: 4622003
num_examples: 8000
- name: validation
num_bytes: 563762
num_examples: 1000
- name: test
num_bytes: 563472
num_examples: 1000
download_size: 2669319
dataset_size: 5749237
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
---
|
Arman123/nalog_kz | Arman123 | "2024-02-07T21:13:00Z" | 33 | 0 | [
"task_categories:text-generation",
"language:ru",
"size_categories:1K<n<10K",
"region:us"
] | [
"text-generation"
] | "2024-02-05T10:47:08Z" | ---
task_categories:
- text-generation
language:
- ru
size_categories:
- 1K<n<10K
--- |
evilback/lama | evilback | "2024-02-05T10:59:37Z" | 33 | 0 | [
"license:llama2",
"region:us"
] | null | "2024-02-05T10:59:37Z" | ---
license: llama2
---
|
chathuranga-jayanath/selfapr-manipulation-bug-context-method-10000 | chathuranga-jayanath | "2024-02-05T11:25:52Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T11:04:52Z" | ---
dataset_info:
features:
- name: fix
dtype: string
- name: ctx
dtype: string
splits:
- name: train
num_bytes: 4798650
num_examples: 8000
- name: validation
num_bytes: 585787
num_examples: 1000
- name: test
num_bytes: 585244
num_examples: 1000
download_size: 2746436
dataset_size: 5969681
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
---
|
ibm/Wish-Summarization-Llama | ibm | "2024-02-05T11:06:05Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T11:05:50Z" | ---
dataset_info:
features:
- name: idx
dtype: int64
- name: text
dtype: string
- name: highlights
dtype: string
- name: id
dtype: string
- name: qa
dtype: string
- name: question
dtype: string
- name: answer
dtype: string
- name: doc_score
dtype: float64
- name: score_qa
dtype: float64
- name: ans_num_words
dtype: int64
- name: text_num_words
dtype: int64
- name: text_longer_0.5
dtype: int64
splits:
- name: train
num_bytes: 37134142
num_examples: 10000
download_size: 22985746
dataset_size: 37134142
---
# Dataset Card for "Wish-Summarization-Llama"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
kristmh/high_vs_random_min_length_500 | kristmh | "2024-02-05T11:16:28Z" | 33 | 0 | [
"size_categories:100K<n<1M",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T11:15:42Z" | ---
configs:
- config_name: default
data_files:
- split: test
path: data/test-*
- split: train
path: data/train-*
- split: validate
path: data/validate-*
dataset_info:
features:
- name: text_clean
dtype: string
- name: label
dtype: int64
- name: __index_level_0__
dtype: int64
splits:
- name: test
num_bytes: 26129807
num_examples: 15895
- name: train
num_bytes: 212631694
num_examples: 127154
- name: validate
num_bytes: 25999306
num_examples: 15894
download_size: 123342839
dataset_size: 264760807
---
# Dataset Card for "high_vs_random_min_length_500"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
chathuranga-jayanath/selfapr-manipulation-bug-error-context-method-10000 | chathuranga-jayanath | "2024-02-05T11:32:24Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T11:32:16Z" | ---
dataset_info:
features:
- name: fix
dtype: string
- name: ctx
dtype: string
splits:
- name: train
num_bytes: 5017924
num_examples: 8000
- name: validation
num_bytes: 614517
num_examples: 1000
- name: test
num_bytes: 608165
num_examples: 1000
download_size: 2850672
dataset_size: 6240606
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
---
|
Sai-Manisha/Events-after-feb-2023 | Sai-Manisha | "2024-02-06T09:40:03Z" | 33 | 0 | [
"license:mit",
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T11:45:35Z" | ---
license: mit
dataset_info:
features:
- name: Date_of_event
dtype: string
- name: Event
dtype: string
splits:
- name: train
num_bytes: 10679
num_examples: 60
- name: validation
num_bytes: 7242
num_examples: 31
download_size: 16558
dataset_size: 17921
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
---
|
DjSteker/el_quijote | DjSteker | "2024-02-05T13:52:50Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T12:31:29Z" | ---
dataset_info:
features:
- name: instruction
dtype: string
splits:
- name: train
num_bytes: 8741405
num_examples: 18119
download_size: 3868290
dataset_size: 8741405
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DragosGorduza/dataset_QUERY_FAQ_MISTRAL_TRAIN | DragosGorduza | "2024-02-05T13:55:23Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T13:55:11Z" | ---
dataset_info:
features:
- name: positive_id
dtype: string
- name: query_id
dtype: string
- name: positive_content
dtype: string
- name: query_content
dtype: string
- name: positive_name
dtype: string
- name: query_name
dtype: string
- name: query_type
dtype: string
- name: instruction
dtype: string
- name: output
dtype:
class_label:
names:
'0': 'NO'
'1': 'YES'
- name: text
dtype: string
splits:
- name: train
num_bytes: 115217882.46257988
num_examples: 50582
download_size: 49534813
dataset_size: 115217882.46257988
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
yiyic/clirmatrix | yiyic | "2024-02-05T14:19:16Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T14:19:02Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: rate
dtype: int64
- name: __index_level_0__
dtype: string
splits:
- name: de_en_multi8_test1
num_bytes: 1334400
num_examples: 1000
- name: de_fr_multi8_test1
num_bytes: 1336714
num_examples: 1000
- name: de_es_multi8_test1
num_bytes: 1336408
num_examples: 1000
- name: en_de_multi8_test1
num_bytes: 1146916
num_examples: 1000
- name: en_fr_multi8_test1
num_bytes: 1148710
num_examples: 1000
- name: en_es_multi8_test1
num_bytes: 1148404
num_examples: 1000
- name: es_en_multi8_test1
num_bytes: 1119660
num_examples: 1000
- name: es_fr_multi8_test1
num_bytes: 1121974
num_examples: 1000
- name: es_de_multi8_test1
num_bytes: 1120180
num_examples: 1000
- name: fr_en_multi8_test1
num_bytes: 1161002
num_examples: 1000
- name: fr_de_multi8_test1
num_bytes: 1161522
num_examples: 1000
- name: fr_es_multi8_test1
num_bytes: 1163010
num_examples: 1000
download_size: 8823803
dataset_size: 14298900
configs:
- config_name: default
data_files:
- split: de_en_multi8_test1
path: data/de_en_multi8_test1-*
- split: de_fr_multi8_test1
path: data/de_fr_multi8_test1-*
- split: de_es_multi8_test1
path: data/de_es_multi8_test1-*
- split: en_de_multi8_test1
path: data/en_de_multi8_test1-*
- split: en_fr_multi8_test1
path: data/en_fr_multi8_test1-*
- split: en_es_multi8_test1
path: data/en_es_multi8_test1-*
- split: es_en_multi8_test1
path: data/es_en_multi8_test1-*
- split: es_fr_multi8_test1
path: data/es_fr_multi8_test1-*
- split: es_de_multi8_test1
path: data/es_de_multi8_test1-*
- split: fr_en_multi8_test1
path: data/fr_en_multi8_test1-*
- split: fr_de_multi8_test1
path: data/fr_de_multi8_test1-*
- split: fr_es_multi8_test1
path: data/fr_es_multi8_test1-*
---
|
yiyic/eval_clir_mtg | yiyic | "2024-02-05T14:52:14Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T14:51:55Z" | ---
dataset_info:
features:
- name: text
dtype: string
splits:
- name: mtg_en
num_bytes: 48454
num_examples: 500
- name: mtg_de
num_bytes: 57186
num_examples: 500
- name: mtg_es
num_bytes: 51263
num_examples: 500
- name: mtg_fr
num_bytes: 59140
num_examples: 500
- name: nq_en
num_bytes: 306325
num_examples: 500
- name: en_rt
num_bytes: 49483
num_examples: 500
- name: de_en_multi8_test1
num_bytes: 1308045
num_examples: 1000
- name: de_fr_multi8_test1
num_bytes: 1310347
num_examples: 1000
- name: de_es_multi8_test1
num_bytes: 1310100
num_examples: 1000
- name: en_de_multi8_test1
num_bytes: 1120561
num_examples: 1000
- name: en_fr_multi8_test1
num_bytes: 1122148
num_examples: 1000
- name: en_es_multi8_test1
num_bytes: 1121901
num_examples: 1000
- name: es_en_multi8_test1
num_bytes: 1093157
num_examples: 1000
- name: es_fr_multi8_test1
num_bytes: 1095459
num_examples: 1000
- name: es_de_multi8_test1
num_bytes: 1093872
num_examples: 1000
- name: fr_en_multi8_test1
num_bytes: 1134440
num_examples: 1000
- name: fr_de_multi8_test1
num_bytes: 1135155
num_examples: 1000
- name: fr_es_multi8_test1
num_bytes: 1136495
num_examples: 1000
download_size: 9037857
dataset_size: 14553531
configs:
- config_name: default
data_files:
- split: mtg_en
path: data/mtg_en-*
- split: mtg_de
path: data/mtg_de-*
- split: mtg_es
path: data/mtg_es-*
- split: mtg_fr
path: data/mtg_fr-*
- split: nq_en
path: data/nq_en-*
- split: en_rt
path: data/en_rt-*
- split: de_en_multi8_test1
path: data/de_en_multi8_test1-*
- split: de_fr_multi8_test1
path: data/de_fr_multi8_test1-*
- split: de_es_multi8_test1
path: data/de_es_multi8_test1-*
- split: en_de_multi8_test1
path: data/en_de_multi8_test1-*
- split: en_fr_multi8_test1
path: data/en_fr_multi8_test1-*
- split: en_es_multi8_test1
path: data/en_es_multi8_test1-*
- split: es_en_multi8_test1
path: data/es_en_multi8_test1-*
- split: es_fr_multi8_test1
path: data/es_fr_multi8_test1-*
- split: es_de_multi8_test1
path: data/es_de_multi8_test1-*
- split: fr_en_multi8_test1
path: data/fr_en_multi8_test1-*
- split: fr_de_multi8_test1
path: data/fr_de_multi8_test1-*
- split: fr_es_multi8_test1
path: data/fr_es_multi8_test1-*
---
|
tyzhu/lmind_nq_train10000_eval6489_v1_qa | tyzhu | "2024-02-05T14:57:26Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T14:56:45Z" | ---
configs:
- config_name: default
data_files:
- split: train_qa
path: data/train_qa-*
- split: train_recite_qa
path: data/train_recite_qa-*
- split: eval_qa
path: data/eval_qa-*
- split: eval_recite_qa
path: data/eval_recite_qa-*
- split: all_docs
path: data/all_docs-*
- split: all_docs_eval
path: data/all_docs_eval-*
- split: train
path: data/train-*
- split: validation
path: data/validation-*
dataset_info:
features:
- name: answers
struct:
- name: answer_start
sequence: 'null'
- name: text
sequence: string
- name: inputs
dtype: string
- name: targets
dtype: string
splits:
- name: train_qa
num_bytes: 1159729
num_examples: 10000
- name: train_recite_qa
num_bytes: 7573876
num_examples: 10000
- name: eval_qa
num_bytes: 752802
num_examples: 6489
- name: eval_recite_qa
num_bytes: 4912675
num_examples: 6489
- name: all_docs
num_bytes: 9144930
num_examples: 14014
- name: all_docs_eval
num_bytes: 9144126
num_examples: 14014
- name: train
num_bytes: 1159729
num_examples: 10000
- name: validation
num_bytes: 752802
num_examples: 6489
download_size: 21497845
dataset_size: 34600669
---
# Dataset Card for "lmind_nq_train10000_eval6489_v1_qa"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
chathuranga-jayanath/selfapr-manipulation-bug-error-context-rest-10000 | chathuranga-jayanath | "2024-02-05T16:12:57Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T16:12:33Z" | ---
dataset_info:
features:
- name: fix
dtype: string
- name: ctx
dtype: string
splits:
- name: train
num_bytes: 9118377
num_examples: 8000
- name: validation
num_bytes: 1122541
num_examples: 1000
- name: test
num_bytes: 1114546
num_examples: 1000
download_size: 5227704
dataset_size: 11355464
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
---
|
stefania-radu/rendered_wikipedia_rw | stefania-radu | "2024-02-05T16:30:38Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T16:30:28Z" | ---
dataset_info:
features:
- name: pixel_values
dtype: image
- name: num_patches
dtype: int64
splits:
- name: train
num_bytes: 123179045.375
num_examples: 13957
download_size: 122274959
dataset_size: 123179045.375
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
CyberHarem/megumin_konosuba | CyberHarem | "2024-02-05T17:30:09Z" | 33 | 0 | [
"task_categories:text-to-image",
"license:mit",
"size_categories:1K<n<10K",
"library:datasets",
"library:mlcroissant",
"region:us",
"art",
"not-for-all-audiences"
] | [
"text-to-image"
] | "2024-02-05T16:35:55Z" | ---
license: mit
task_categories:
- text-to-image
tags:
- art
- not-for-all-audiences
size_categories:
- n<1K
---
# Dataset of megumin/めぐみん (Kono Subarashii Sekai ni Shukufuku wo!)
This is the dataset of megumin/めぐみん (Kono Subarashii Sekai ni Shukufuku wo!), containing 450 images and their tags.
The core tags of this character are `short_hair, brown_hair, red_eyes, hat, witch_hat, black_hair`, which are pruned in this dataset.
Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)).
## List of Packages
| Name | Images | Size | Download | Type | Description |
|:-----------------|---------:|:-----------|:------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------|
| raw | 450 | 430.69 MiB | [Download](https://huggingface.co/datasets/CyberHarem/megumin_konosuba/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). |
| 800 | 450 | 323.90 MiB | [Download](https://huggingface.co/datasets/CyberHarem/megumin_konosuba/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. |
| stage3-p480-800 | 844 | 566.88 MiB | [Download](https://huggingface.co/datasets/CyberHarem/megumin_konosuba/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
| 1200 | 450 | 430.50 MiB | [Download](https://huggingface.co/datasets/CyberHarem/megumin_konosuba/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. |
| stage3-p480-1200 | 844 | 722.60 MiB | [Download](https://huggingface.co/datasets/CyberHarem/megumin_konosuba/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
### Load Raw Dataset with Waifuc
We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code
```python
import os
import zipfile
from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource
# download raw archive file
zip_file = hf_hub_download(
repo_id='CyberHarem/megumin_konosuba',
repo_type='dataset',
filename='dataset-raw.zip',
)
# extract files to your directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
zf.extractall(dataset_dir)
# load the dataset with waifuc
source = LocalSource(dataset_dir)
for item in source:
print(item.image, item.meta['filename'], item.meta['tags'])
```
## List of Clusters
List of tag clustering result, maybe some outfits can be mined here.
### Raw Text Version
| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags |
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| 0 | 10 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, anime_coloring, solo, cape, parody, closed_mouth, bangs |
| 1 | 6 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, holding_staff, red_dress, short_hair_with_long_locks, solo, black_cape, black_gloves, collarbone, fingerless_gloves, looking_at_viewer, bangs, upper_body, anime_coloring, black_choker, sidelocks |
| 2 | 25 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, cape, red_dress, short_hair_with_long_locks, fingerless_gloves, solo, belt, black_gloves, holding_staff, choker, collar |
| 3 | 38 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | bandaged_leg, belt, black_thighhighs, fingerless_gloves, red_dress, 1girl, single_thighhigh, cape, short_hair_with_long_locks, solo, black_gloves, mismatched_legwear, looking_at_viewer, holding_staff |
| 4 | 5 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1girl, bangs, bare_shoulders, black_choker, collarbone, fingerless_gloves, red_dress, short_hair_with_long_locks, sidelocks, solo, upper_body, black_gloves, off_shoulder, indoors, anime_coloring, chair, open_mouth, sweatdrop |
| 5 | 5 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | 1girl, anime_coloring, clenched_teeth, solo, bangs, black_choker, cape, collar, parody |
| 6 | 6 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | 1girl, anime_coloring, bare_shoulders, choker, collar, parody, short_hair_with_long_locks, solo, open_mouth |
### Table Version
| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | anime_coloring | solo | cape | parody | closed_mouth | bangs | holding_staff | red_dress | short_hair_with_long_locks | black_cape | black_gloves | collarbone | fingerless_gloves | looking_at_viewer | upper_body | black_choker | sidelocks | belt | choker | collar | bandaged_leg | black_thighhighs | single_thighhigh | mismatched_legwear | bare_shoulders | off_shoulder | indoors | chair | open_mouth | sweatdrop | clenched_teeth |
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:-----------------|:-------|:-------|:---------|:---------------|:--------|:----------------|:------------|:-----------------------------|:-------------|:---------------|:-------------|:--------------------|:--------------------|:-------------|:---------------|:------------|:-------|:---------|:---------|:---------------|:-------------------|:-------------------|:---------------------|:-----------------|:---------------|:----------|:--------|:-------------|:------------|:-----------------|
| 0 | 10 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | |
| 1 | 6 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | | | | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | |
| 2 | 25 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | | X | X | | | | X | X | X | | X | | X | | | | | X | X | X | | | | | | | | | | | |
| 3 | 38 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | | X | X | | | | X | X | X | | X | | X | X | | | | X | | | X | X | X | X | | | | | | | |
| 4 | 5 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | X | X | | | | X | | X | X | | X | X | X | | X | X | X | | | | | | | | X | X | X | X | X | X | |
| 5 | 5 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | X | X | X | X | X | | X | | | | | | | | | | X | | | | X | | | | | | | | | | | X |
| 6 | 6 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | X | X | X | | X | | | | | X | | | | | | | | | | X | X | | | | | X | | | | X | | |
|
Thanmay/commonsense_qa-mr | Thanmay | "2024-02-05T16:40:17Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T16:40:11Z" | ---
dataset_info:
features:
- name: id
dtype: string
- name: question
dtype: string
- name: question_concept
dtype: string
- name: choices
sequence:
- name: label
dtype: string
- name: text
dtype: string
- name: answerKey
dtype: string
- name: itv2 mr question
dtype: string
splits:
- name: validation
num_bytes: 502700
num_examples: 1221
- name: test
num_bytes: 479030
num_examples: 1140
download_size: 496015
dataset_size: 981730
configs:
- config_name: default
data_files:
- split: validation
path: data/validation-*
- split: test
path: data/test-*
---
|
chathuranga-jayanath/selfapr-manipulation-bug-error-context-variables-10000 | chathuranga-jayanath | "2024-02-05T16:43:12Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T16:42:53Z" | ---
dataset_info:
features:
- name: fix
dtype: string
- name: ctx
dtype: string
splits:
- name: train
num_bytes: 5153924
num_examples: 8000
- name: validation
num_bytes: 631517
num_examples: 1000
- name: test
num_bytes: 625165
num_examples: 1000
download_size: 2885154
dataset_size: 6410606
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
---
|
PBWR/Building3D | PBWR | "2024-02-05T16:51:52Z" | 33 | 0 | [
"license:apache-2.0",
"region:us"
] | null | "2024-02-05T16:51:51Z" | ---
license: apache-2.0
---
|
Thanmay/commonsense_qa-hi | Thanmay | "2024-02-05T17:17:26Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T17:17:20Z" | ---
dataset_info:
features:
- name: id
dtype: string
- name: question
dtype: string
- name: question_concept
dtype: string
- name: choices
sequence:
- name: label
dtype: string
- name: text
dtype: string
- name: answerKey
dtype: string
- name: itv2 hi question
dtype: string
splits:
- name: validation
num_bytes: 495303
num_examples: 1221
- name: test
num_bytes: 470666
num_examples: 1140
download_size: 487500
dataset_size: 965969
configs:
- config_name: default
data_files:
- split: validation
path: data/validation-*
- split: test
path: data/test-*
---
|
Thanmay/arc-easy-mr | Thanmay | "2024-02-11T08:50:41Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T17:37:13Z" | ---
dataset_info:
features:
- name: id
dtype: string
- name: answerKey
dtype: string
- name: itv2 mr
dtype: string
- name: question
dtype: string
- name: choices
struct:
- name: label
sequence: string
- name: text
sequence: string
splits:
- name: test
num_bytes: 2800898
num_examples: 2376
- name: validation
num_bytes: 676483
num_examples: 570
download_size: 1290577
dataset_size: 3477381
configs:
- config_name: default
data_files:
- split: test
path: data/test-*
- split: validation
path: data/validation-*
---
|
Thanmay/arc-easy-ta | Thanmay | "2024-02-11T18:42:50Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T17:57:01Z" | ---
dataset_info:
features:
- name: id
dtype: string
- name: answerKey
dtype: string
- name: itv2 ta
dtype: string
- name: question
dtype: string
- name: choices
struct:
- name: label
sequence: string
- name: text
sequence: string
splits:
- name: test
num_bytes: 3289750
num_examples: 2376
- name: validation
num_bytes: 787255
num_examples: 570
download_size: 1379065
dataset_size: 4077005
configs:
- config_name: default
data_files:
- split: test
path: data/test-*
- split: validation
path: data/validation-*
---
|
micsell/hebrew_kan_sentence0 | micsell | "2024-02-05T23:19:59Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T17:58:50Z" | ---
dataset_info:
features:
- name: audio
dtype: audio
- name: id
dtype: string
- name: language
dtype: string
- name: sentence
dtype: string
splits:
- name: train
num_bytes: 1911995061.0
num_examples: 10000
download_size: 1911289950
dataset_size: 1911995061.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Thanmay/arc-challenge-mr | Thanmay | "2024-02-11T08:37:28Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T18:55:36Z" | ---
dataset_info:
features:
- name: id
dtype: string
- name: answerKey
dtype: string
- name: itv2 mr
dtype: string
- name: question
dtype: string
- name: choices
struct:
- name: label
sequence: string
- name: text
sequence: string
splits:
- name: test
num_bytes: 1639407
num_examples: 1172
- name: validation
num_bytes: 419668
num_examples: 299
download_size: 766150
dataset_size: 2059075
configs:
- config_name: default
data_files:
- split: test
path: data/test-*
- split: validation
path: data/validation-*
---
|
BubbleJoe/sms_generated_mistral_v01 | BubbleJoe | "2024-02-05T18:57:54Z" | 33 | 1 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T18:57:53Z" | ---
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 1041612
num_examples: 2034
download_size: 321498
dataset_size: 1041612
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Atipico1/mrqa_preprocessed_thres-0.9_by-st | Atipico1 | "2024-02-06T05:24:52Z" | 33 | 0 | [
"size_categories:100K<n<1M",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-05T19:38:01Z" | ---
dataset_info:
features:
- name: subset
dtype: string
- name: qid
dtype: string
- name: question
dtype: string
- name: answers
sequence: string
- name: masked_query
dtype: string
- name: context
dtype: string
- name: answer_sent
dtype: string
- name: answer_in_context
sequence: string
- name: query_embedding
sequence: float32
splits:
- name: train
num_bytes: 838499490.2996268
num_examples: 208017
download_size: 873260263
dataset_size: 838499490.2996268
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
MRQA Loaded without SearchQA ! -> Size : 450309
Remove duplicates by string match -> Before : 450309 | After : 401207
Before context preprocess: 401207
After context preprocess: 381972
Before split: 381972
After split: 378213
After context length filtering: 233328
After answer length filtering: 222697
Remove duplicates by similarity-> Before : 222697 | After : 208017 |
parhasard/eir-alpaca-type | parhasard | "2024-02-05T22:31:46Z" | 33 | 0 | [
"license:mit",
"region:us"
] | null | "2024-02-05T22:31:43Z" | ---
license: mit
---
|
modelloosrvcc/Announcer | modelloosrvcc | "2024-02-05T22:36:56Z" | 33 | 0 | [
"license:openrail",
"size_categories:n<1K",
"format:audiofolder",
"modality:audio",
"library:datasets",
"library:mlcroissant",
"region:us"
] | null | "2024-02-05T22:36:37Z" | ---
license: openrail
---
|
Charles333/demo_json | Charles333 | "2024-02-06T10:09:08Z" | 33 | 0 | [
"license:apache-2.0",
"size_categories:10K<n<100K",
"format:csv",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T00:00:09Z" | ---
license: apache-2.0
---
|
FINNUMBER/FINCH_TEST_FULL | FINNUMBER | "2024-02-06T02:31:45Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T02:31:34Z" | ---
dataset_info:
features:
- name: task
dtype: string
- name: context
dtype: string
- name: question
dtype: string
- name: answer
dtype: string
- name: instruction
dtype: string
splits:
- name: train
num_bytes: 118761883
num_examples: 34092
download_size: 44992827
dataset_size: 118761883
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "FINCH_TEST"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
hoangvanvietanh/user_5476d2c924204b6f9e38713118fdb9b2_dataset | hoangvanvietanh | "2024-02-07T06:30:36Z" | 33 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T02:38:07Z" | ---
dataset_info:
features:
- name: audio
dtype: audio
- name: id
dtype: string
- name: transcription
dtype: string
splits:
- name: train
num_bytes: 1524941.0
num_examples: 3
download_size: 1507145
dataset_size: 1524941.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Jeovane/minhavozmr2 | Jeovane | "2024-02-06T02:50:17Z" | 33 | 0 | [
"license:openrail",
"size_categories:n<1K",
"format:audiofolder",
"modality:audio",
"library:datasets",
"library:mlcroissant",
"region:us"
] | null | "2024-02-06T02:48:27Z" | ---
license: openrail
---
|
micsell/hebrew_kan_sentence30000 | micsell | "2024-02-06T03:27:46Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T03:26:31Z" | ---
dataset_info:
features:
- name: audio
dtype: audio
- name: id
dtype: string
- name: language
dtype: string
- name: sentence
dtype: string
splits:
- name: train
num_bytes: 1842548455.0
num_examples: 10000
download_size: 1841787410
dataset_size: 1842548455.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
ZiHDeng/hf-ny8-v6 | ZiHDeng | "2024-02-07T03:48:14Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T03:44:15Z" | ---
dataset_info:
features:
- name: repo_id
dtype: string
- name: file_path
dtype: string
- name: content
dtype: string
- name: __index_level_0__
dtype: int64
splits:
- name: train
num_bytes: 4640608
num_examples: 6206
download_size: 413763
dataset_size: 4640608
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
micsell/hebrew_kan_sentence40000 | micsell | "2024-02-06T04:51:18Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T04:50:07Z" | ---
dataset_info:
features:
- name: audio
dtype: audio
- name: id
dtype: string
- name: language
dtype: string
- name: sentence
dtype: string
splits:
- name: train
num_bytes: 1881163759.0
num_examples: 10000
download_size: 1880326655
dataset_size: 1881163759.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
yzhuang/metatree_mfeat_fourier | yzhuang | "2024-02-06T05:54:46Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T05:53:37Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: X
sequence: float64
- name: y
dtype: int64
splits:
- name: train
num_bytes: 862872
num_examples: 1374
- name: validation
num_bytes: 393128
num_examples: 626
download_size: 0
dataset_size: 1256000
---
# Dataset Card for "metatree_mfeat_fourier"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
yzhuang/metatree_mfeat_karhunen | yzhuang | "2024-02-06T05:55:32Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T05:55:29Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: X
sequence: float64
- name: y
dtype: int64
splits:
- name: train
num_bytes: 741076
num_examples: 1393
- name: validation
num_bytes: 322924
num_examples: 607
download_size: 1290598
dataset_size: 1064000
---
# Dataset Card for "metatree_mfeat_karhunen"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
yzhuang/metatree_RandomRBF_0_0 | yzhuang | "2024-02-06T05:58:00Z" | 33 | 0 | [
"size_categories:1M<n<10M",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T05:57:53Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: X
sequence: float64
- name: y
dtype: int64
splits:
- name: train
num_bytes: 69979900
num_examples: 699799
- name: validation
num_bytes: 30020100
num_examples: 300201
download_size: 103911586
dataset_size: 100000000
---
# Dataset Card for "metatree_RandomRBF_0_0"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
yzhuang/metatree_RandomRBF_10_1E_4 | yzhuang | "2024-02-06T05:59:05Z" | 33 | 0 | [
"size_categories:1M<n<10M",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T05:58:58Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: X
sequence: float64
- name: y
dtype: int64
splits:
- name: train
num_bytes: 70077600
num_examples: 700776
- name: validation
num_bytes: 29922400
num_examples: 299224
download_size: 103910649
dataset_size: 100000000
---
# Dataset Card for "metatree_RandomRBF_10_1E_4"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
yzhuang/metatree_BNG_breast_w_ | yzhuang | "2024-02-06T06:00:54Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T06:00:51Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: X
sequence: float64
- name: y
dtype: int64
splits:
- name: train
num_bytes: 2519880
num_examples: 27390
- name: validation
num_bytes: 1101792
num_examples: 11976
download_size: 1587654
dataset_size: 3621672
---
# Dataset Card for "metatree_BNG_breast_w_"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
yzhuang/metatree_BNG_colic_ | yzhuang | "2024-02-06T06:03:49Z" | 33 | 0 | [
"size_categories:1M<n<10M",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T06:03:44Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: X
sequence: float64
- name: y
dtype: int64
splits:
- name: train
num_bytes: 53223940
num_examples: 700315
- name: validation
num_bytes: 22776060
num_examples: 299685
download_size: 73796900
dataset_size: 76000000
---
# Dataset Card for "metatree_BNG_colic_"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
yzhuang/metatree_BNG_page_blocks_ | yzhuang | "2024-02-06T06:04:34Z" | 33 | 0 | [
"size_categories:100K<n<1M",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T06:04:29Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: X
sequence: float64
- name: y
dtype: int64
splits:
- name: train
num_bytes: 20608800
num_examples: 206088
- name: validation
num_bytes: 8915700
num_examples: 89157
download_size: 29975608
dataset_size: 29524500
---
# Dataset Card for "metatree_BNG_page_blocks_"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
yzhuang/metatree_BNG_waveform_5000_ | yzhuang | "2024-02-06T06:09:39Z" | 33 | 0 | [
"size_categories:1M<n<10M",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T06:09:22Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: X
sequence: float64
- name: y
dtype: int64
splits:
- name: train
num_bytes: 237790220
num_examples: 699383
- name: validation
num_bytes: 102209780
num_examples: 300617
download_size: 405577557
dataset_size: 340000000
---
# Dataset Card for "metatree_BNG_waveform_5000_"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
yzhuang/metatree_fri_c4_1000_100 | yzhuang | "2024-02-06T06:16:34Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T06:16:31Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: X
sequence: float64
- name: y
dtype: int64
splits:
- name: train
num_bytes: 590400
num_examples: 720
- name: validation
num_bytes: 229600
num_examples: 280
download_size: 1008641
dataset_size: 820000
---
# Dataset Card for "metatree_fri_c4_1000_100"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
yzhuang/metatree_cpu_small | yzhuang | "2024-02-06T06:18:16Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T06:18:13Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: X
sequence: float64
- name: y
dtype: int64
splits:
- name: train
num_bytes: 655400
num_examples: 5650
- name: validation
num_bytes: 294872
num_examples: 2542
download_size: 703010
dataset_size: 950272
---
# Dataset Card for "metatree_cpu_small"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
yzhuang/metatree_space_ga | yzhuang | "2024-02-06T06:18:32Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T06:18:29Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: X
sequence: float64
- name: y
dtype: int64
splits:
- name: train
num_bytes: 148580
num_examples: 2185
- name: validation
num_bytes: 62696
num_examples: 922
download_size: 195171
dataset_size: 211276
---
# Dataset Card for "metatree_space_ga"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
yzhuang/metatree_fri_c3_1000_5 | yzhuang | "2024-02-06T06:21:42Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T06:21:39Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: X
sequence: float64
- name: y
dtype: int64
splits:
- name: train
num_bytes: 42180
num_examples: 703
- name: validation
num_bytes: 17820
num_examples: 297
download_size: 56734
dataset_size: 60000
---
# Dataset Card for "metatree_fri_c3_1000_5"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
yzhuang/metatree_puma8NH | yzhuang | "2024-02-06T06:21:58Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T06:21:55Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: X
sequence: float64
- name: y
dtype: int64
splits:
- name: train
num_bytes: 489048
num_examples: 5822
- name: validation
num_bytes: 199080
num_examples: 2370
download_size: 685137
dataset_size: 688128
---
# Dataset Card for "metatree_puma8NH"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
yzhuang/metatree_fri_c0_1000_10 | yzhuang | "2024-02-06T06:23:35Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T06:23:32Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: X
sequence: float64
- name: y
dtype: int64
splits:
- name: train
num_bytes: 71100
num_examples: 711
- name: validation
num_bytes: 28900
num_examples: 289
download_size: 105321
dataset_size: 100000
---
# Dataset Card for "metatree_fri_c0_1000_10"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
yzhuang/metatree_fri_c0_1000_25 | yzhuang | "2024-02-06T06:24:22Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T06:24:19Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: X
sequence: float64
- name: y
dtype: int64
splits:
- name: train
num_bytes: 157080
num_examples: 714
- name: validation
num_bytes: 62920
num_examples: 286
download_size: 254313
dataset_size: 220000
---
# Dataset Card for "metatree_fri_c0_1000_25"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
yzhuang/metatree_fried | yzhuang | "2024-02-06T06:25:27Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T06:25:24Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: X
sequence: float64
- name: y
dtype: int64
splits:
- name: train
num_bytes: 2846400
num_examples: 28464
- name: validation
num_bytes: 1230400
num_examples: 12304
download_size: 962241
dataset_size: 4076800
---
# Dataset Card for "metatree_fried"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
yzhuang/metatree_fri_c2_1000_10 | yzhuang | "2024-02-06T06:26:43Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T06:26:41Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: X
sequence: float64
- name: y
dtype: int64
splits:
- name: train
num_bytes: 71800
num_examples: 718
- name: validation
num_bytes: 28200
num_examples: 282
download_size: 105292
dataset_size: 100000
---
# Dataset Card for "metatree_fri_c2_1000_10"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
yzhuang/metatree_socmob | yzhuang | "2024-02-06T06:27:29Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T06:27:27Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: X
sequence: float64
- name: y
dtype: int64
splits:
- name: train
num_bytes: 22736
num_examples: 812
- name: validation
num_bytes: 9632
num_examples: 344
download_size: 13298
dataset_size: 32368
---
# Dataset Card for "metatree_socmob"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
yzhuang/metatree_pc1 | yzhuang | "2024-02-06T06:28:17Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T06:28:15Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: X
sequence: float64
- name: y
dtype: int64
splits:
- name: train
num_bytes: 147392
num_examples: 784
- name: validation
num_bytes: 61100
num_examples: 325
download_size: 72194
dataset_size: 208492
---
# Dataset Card for "metatree_pc1"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Sangjeong/TestData | Sangjeong | "2024-02-06T08:09:46Z" | 33 | 0 | [
"license:apache-2.0",
"size_categories:n<1K",
"format:csv",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T06:56:37Z" | ---
license: apache-2.0
---
|
gokulraj/demo-dataset | gokulraj | "2024-02-06T07:17:46Z" | 33 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T07:16:34Z" | ---
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 2272
num_examples: 8
download_size: 3903
dataset_size: 2272
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
micsell/hebrew_kan_sentence60000 | micsell | "2024-02-06T07:35:01Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T07:33:45Z" | ---
dataset_info:
features:
- name: audio
dtype: audio
- name: id
dtype: string
- name: language
dtype: string
- name: sentence
dtype: string
splits:
- name: train
num_bytes: 1817024532.0
num_examples: 10000
download_size: 1816175274
dataset_size: 1817024532.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
kristmh/high_vs_random_min_len_1000 | kristmh | "2024-02-06T07:49:16Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T07:48:42Z" | ---
configs:
- config_name: default
data_files:
- split: test
path: data/test-*
- split: train
path: data/train-*
- split: validate
path: data/validate-*
dataset_info:
features:
- name: text_clean
dtype: string
- name: label
dtype: int64
- name: __index_level_0__
dtype: int64
splits:
- name: test
num_bytes: 19282841
num_examples: 7642
- name: train
num_bytes: 157361909
num_examples: 61136
- name: validate
num_bytes: 18779565
num_examples: 7642
download_size: 85467675
dataset_size: 195424315
---
# Dataset Card for "high_vs_random_min_len_1000"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
bahar125/test12 | bahar125 | "2024-02-06T09:32:36Z" | 33 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T09:23:27Z" | ---
dataset_info:
features:
- name: labels
dtype:
class_label:
names:
'0': negative
'1': neutral
'2': positive
- name: text
dtype: string
splits:
- name: train
num_bytes: 7807
num_examples: 82
- name: test
num_bytes: 1928
num_examples: 20
download_size: 10418
dataset_size: 9735
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
|
jan-hq/finance_alpaca_binarized | jan-hq | "2024-02-06T09:48:28Z" | 33 | 1 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T09:40:46Z" | ---
dataset_info:
features:
- name: messages
list:
- name: content
dtype: string
- name: role
dtype: string
splits:
- name: train
num_bytes: 35528214.41925935
num_examples: 62020
- name: test
num_bytes: 3948088.580740655
num_examples: 6892
download_size: 23484879
dataset_size: 39476303.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
|
micsell/hebrew_kan_sentence80000 | micsell | "2024-02-06T11:08:02Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T11:07:04Z" | ---
dataset_info:
features:
- name: audio
dtype: audio
- name: id
dtype: string
- name: language
dtype: string
- name: sentence
dtype: string
splits:
- name: train
num_bytes: 1758082392.0
num_examples: 10000
download_size: 1757256596
dataset_size: 1758082392.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
heroza/isic_dummy | heroza | "2024-02-06T11:16:49Z" | 33 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T11:13:25Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': combined
'1': seb
splits:
- name: train
num_bytes: 210629176.0
num_examples: 150
- name: validation
num_bytes: 210629176.0
num_examples: 150
- name: test
num_bytes: 210629176.0
num_examples: 150
download_size: 631873878
dataset_size: 631887528.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
---
|
gizemgg/wiki-eng-summary-trial | gizemgg | "2024-02-06T11:46:07Z" | 33 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T11:46:03Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: __index_level_0__
dtype: int64
splits:
- name: train
num_bytes: 38422364.882128276
num_examples: 2050
- name: test
num_bytes: 4273316.679573291
num_examples: 228
download_size: 23587919
dataset_size: 42695681.561701566
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
|
micsell/hebrew_kan_sentence90000 | micsell | "2024-02-06T12:30:29Z" | 33 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T12:29:24Z" | ---
dataset_info:
features:
- name: audio
dtype: audio
- name: id
dtype: string
- name: language
dtype: string
- name: sentence
dtype: string
splits:
- name: train
num_bytes: 1900103768.0
num_examples: 10000
download_size: 1899330575
dataset_size: 1900103768.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
PRACADACERA/Drago | PRACADACERA | "2024-02-06T12:56:01Z" | 33 | 0 | [
"license:openrail",
"size_categories:n<1K",
"format:audiofolder",
"modality:audio",
"library:datasets",
"library:mlcroissant",
"region:us"
] | null | "2024-02-06T12:54:14Z" | ---
license: openrail
---
|
mertllc/test | mertllc | "2024-02-06T13:05:55Z" | 33 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-02-06T13:05:49Z" | ---
dataset_info:
features:
- name: audio
dtype: audio
- name: text
dtype: string
- name: speaker_id
dtype: int64
- name: __index_level_0__
dtype: int64
splits:
- name: train
num_bytes: 10050421.0
num_examples: 500
download_size: 9992979
dataset_size: 10050421.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
breno30/LocutorLindomarTop | breno30 | "2024-02-06T13:11:13Z" | 33 | 0 | [
"license:openrail",
"size_categories:n<1K",
"format:audiofolder",
"modality:audio",
"library:datasets",
"library:mlcroissant",
"region:us"
] | null | "2024-02-06T13:10:47Z" | ---
license: openrail
---
|