Upload results3/20240723-150658-hellaswag:mc-2-tasks.jsonl with huggingface_hub
results3/20240723-150658-hellaswag:mc-2-tasks.jsonl
ADDED
@@ -0,0 +1,2 @@
+{"task_name": "hellaswag:mc", "task_hash": "2b8a9fe192b57ca3fb39ae9195fc0e18", "model_hash": "ed0459abf67f30ddf56fc6cee1182f49", "model_config": {"model": "EleutherAI/pythia-1b", "revision": null, "trust_remote_code": null, "max_length": 2048, "model_path": null, "model_type": "hf"}, "task_config": {"dataset_path": "hellaswag", "native_id_field": "ind", "primary_metric": "acc_raw", "split": "validation", "fewshot_source": "OLMES:hellaswag", "limit": 5, "random_subsample_seed": 1234, "generation_kwargs": null, "context_kwargs": null, "metric_kwargs": null, "num_shots": 0, "fewshot_seed": 1234, "dataset_name": null, "task_name": "hellaswag:mc", "version": 0, "task_core": "hellaswag"}, "compute_config": {"batch_size": "2", "max_batch_size": 32, "output_dir": "/results", "num_recorded_inputs": 3, "hf_save_dir": "jjyang7/eval-testing1//results3", "save_raw_requests": true, "check_datalake": false}, "processing_time": 13.939208030700684, "current_date": "2024-07-23 22:06:54 UTC", "num_instances": 5, "beaker_info": {"BEAKER_EXPERIMENT_ID": "01J3GSR5TV8X6HK1VE6TQ5D446", "BEAKER_ASSIGNED_GPU_COUNT": "1", "BEAKER_WORKLOAD_ID": "01J3GSR5TV8X6HK1VE6TQ5D446", "BEAKER_TASK_ID": "01J3GSR5V3BJTMJ47JF0Q9PC89", "BEAKER_NODE_ID": "01GQHF8T4AFARHF9NRGG2R1E7J", "BEAKER_JOB_ID": "01J3GSR9TNXWE3ZF2FTMJGKGME", "BEAKER_ASSIGNED_CPU_COUNT": "15.5", "BEAKER_JOB_KIND": "batch", "GIT_REF": "0374f7b8e8cae38690f838d8d9eda0415d0457b5", "BEAKER_NODE_HOSTNAME": "aristo-cirrascale-13.reviz.ai2.in"}, "metrics": {"acc_raw": 0.2, "acc_per_token": 0.2, "acc_per_char": 0.2, "primary_score": 0.2}}
+{"task_name": "mmlu_high_school_biology:mc", "task_hash": "62221c82efba91bba59e691e489b6af4", "model_hash": "ed0459abf67f30ddf56fc6cee1182f49", "model_config": {"model": "EleutherAI/pythia-1b", "revision": null, "trust_remote_code": null, "max_length": 2048, "model_path": null, "model_type": "hf"}, "task_config": {"native_id_field": "index", "primary_metric": "acc_raw", "split": "test", "limit": 5, "random_subsample_seed": 1234, "num_shots": 5, "generation_kwargs": null, "context_kwargs": null, "metric_kwargs": null, "fewshot_seed": 1234, "dataset_name": "high_school_biology", "dataset_path": "cais/mmlu", "task_name": "mmlu_high_school_biology:mc", "version": 1, "task_core": "mmlu_high_school_biology"}, "compute_config": {"batch_size": "2", "max_batch_size": 32, "output_dir": "/results", "num_recorded_inputs": 3, "hf_save_dir": "jjyang7/eval-testing1//results3", "save_raw_requests": true, "check_datalake": false}, "processing_time": 3.7210774421691895, "current_date": "2024-07-23 22:06:58 UTC", "num_instances": 5, "beaker_info": {"BEAKER_EXPERIMENT_ID": "01J3GSR5TV8X6HK1VE6TQ5D446", "BEAKER_ASSIGNED_GPU_COUNT": "1", "BEAKER_WORKLOAD_ID": "01J3GSR5TV8X6HK1VE6TQ5D446", "BEAKER_TASK_ID": "01J3GSR5V3BJTMJ47JF0Q9PC89", "BEAKER_NODE_ID": "01GQHF8T4AFARHF9NRGG2R1E7J", "BEAKER_JOB_ID": "01J3GSR9TNXWE3ZF2FTMJGKGME", "BEAKER_ASSIGNED_CPU_COUNT": "15.5", "BEAKER_JOB_KIND": "batch", "GIT_REF": "0374f7b8e8cae38690f838d8d9eda0415d0457b5", "BEAKER_NODE_HOSTNAME": "aristo-cirrascale-13.reviz.ai2.in"}, "metrics": {"acc_raw": 0.6000000000000001, "acc_per_token": 0.6000000000000001, "acc_per_char": 0.6000000000000001, "primary_score": 0.6000000000000001}}
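
For context, the commit title says the file was pushed "with huggingface_hub". A minimal sketch of such an upload using the library's `HfApi.upload_file` helper; the repo id `jjyang7/eval-testing1`, the dataset repo type, and the local path under `/results` are inferred from the records above (`hf_save_dir`, `output_dir`), and token handling is assumed to be set up separately:

```python
# Sketch: push one results .jsonl file to a Hugging Face dataset repo.
# Assumes you are logged in (`huggingface-cli login`) or HF_TOKEN is set.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="/results/20240723-150658-hellaswag:mc-2-tasks.jsonl",  # local path (assumed from output_dir)
    path_in_repo="results3/20240723-150658-hellaswag:mc-2-tasks.jsonl",
    repo_id="jjyang7/eval-testing1",  # inferred from hf_save_dir
    repo_type="dataset",
    commit_message="Upload results3/20240723-150658-hellaswag:mc-2-tasks.jsonl with huggingface_hub",
)
```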
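
On the consuming side, each line of the file is a self-contained JSON record, so the per-task primary scores can be read back with only the standard library. A sketch, using the filename from this commit:

```python
import json

# Each line of the .jsonl file is one task's result record.
with open("20240723-150658-hellaswag:mc-2-tasks.jsonl") as f:
    records = [json.loads(line) for line in f if line.strip()]

for rec in records:
    print(rec["task_name"], rec["model_config"]["model"], rec["metrics"]["primary_score"])
# hellaswag:mc EleutherAI/pythia-1b 0.2
# mmlu_high_school_biology:mc EleutherAI/pythia-1b 0.6000000000000001
```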