eduagarcia committed
Commit 4bd28eb · verified · 1 Parent(s): 0d38aa3

Update status of paloalma/ECE-TW3-JRGL-V1_eval_request_False_float16_Original to FAILED

paloalma/ECE-TW3-JRGL-V1_eval_request_False_float16_Original.json CHANGED
@@ -8,10 +8,12 @@
  "architectures": "LlamaForCausalLM",
  "weight_type": "Original",
  "main_language": "English",
- "status": "RUNNING",
+ "status": "FAILED",
  "submitted_time": "2024-04-26T08:12:30Z",
  "model_type": "🤝 : base merges and moerges",
  "source": "leaderboard",
  "job_id": 618,
- "job_start_time": "2024-05-16T11-58-38.457588"
+ "job_start_time": "2024-05-16T11-58-38.457588",
+ "error_msg": "An error occurred while downloading using `hf_transfer`. Consider disabling HF_HUB_ENABLE_HF_TRANSFER for better error handling.",
+ "traceback": "Traceback (most recent call last):\n File \"/root/miniconda3/envs/torch21/lib/python3.11/site-packages/huggingface_hub/file_download.py\", line 509, in http_get\n hf_transfer.download(\nException: Failed too many failures in parallel (3): PyErr { type: <class 'Exception'>, value: Exception('Error while downloading: reqwest::Error { kind: Status(500), url: Url { scheme: \"https\", cannot_be_a_base: false, username: \"\", password: None, host: Some(Domain(\"cdn-lfs-us-1.huggingface.co\")), port: None, path: \"/repos/62/f7/62f79bcc2eae35953e8eeaaccab275a66e2d6435d327781c1e18a65957bb1990/a7080985869f872c9a202ab349c0fa768c2345d33c23e5f7b9956e3cc737dcde\", query: Some(\"response-content-disposition=attachment%3B+filename*%3DUTF-8%27%27model-00051-of-00081.safetensors%3B+filename%3D%22model-00051-of-00081.safetensors%22%3B&Expires=1716121205&Policy=eyJTdGF0ZW1lbnQiOlt7IkNvbmRpdGlvbiI6eyJEYXRlTGVzc1RoYW4iOnsiQVdTOkVwb2NoVGltZSI6MTcxNjEyMTIwNX19LCJSZXNvdXJjZSI6Imh0dHBzOi8vY2RuLWxmcy11cy0xLmh1Z2dpbmdmYWNlLmNvL3JlcG9zLzYyL2Y3LzYyZjc5YmNjMmVhZTM1OTUzZThlZWFhY2NhYjI3NWE2NmUyZDY0MzVkMzI3NzgxYzFlMThhNjU5NTdiYjE5OTAvYTcwODA5ODU4NjlmODcyYzlhMjAyYWIzNDljMGZhNzY4YzIzNDVkMzNjMjNlNWY3Yjk5NTZlM2NjNzM3ZGNkZT9yZXNwb25zZS1jb250ZW50LWRpc3Bvc2l0aW9uPSoifV19&Signature=N99AoBFH02yVOIYzN7mcL4m8YN0wezBBbqUl6Gcy4rzGlzN%7EbrRDNNhnfiXCyButTiLUVq9%7EMuvL3MVjZNtruOPtfFU5ce1QiGmVUz1lAMSm4Gm8-N5vCrQ4WuHhm%7ExQZWlrxDgmJa48%7EVNO93RFnX%7EH001k8m772FfRAs7S-XsMyZfrepipqRk2ApdVvUHWtpQxiSIFV--SwOc%7EwSkun8QM0FqPFDXYSHiSpYCbiVGs%7Ez1zW2XWuudvhhVqyMB7XYEqV2jUOgE5HRLXmJpr8iAxYdawPqRFTzDPqPNGAamxQCRD57JRZUIg6wUhkA8OGeRQQEYnC2bKFDq%7E9UKODQ__&Key-Pair-Id=KCD77M1F0VK2B\"), fragment: None } }'), traceback: None } (NoPermits)\n\nThe above exception was the direct cause of the following exception:\n\nTraceback (most recent call last):\n File \"/workspace/repos/llm_leaderboard/llm_leaderboard_eval_bot/evaluate_llms.py\", line 207, in wait_download_and_run_request\n run_request(\n File \"/workspace/repos/llm_leaderboard/llm_leaderboard_eval_bot/evaluate_llms.py\", line 69, in run_request\n results = run_eval_on_model(\n ^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/llm_leaderboard_eval_bot/run_eval.py\", line 60, in run_eval_on_model\n result = evaluate(\n ^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/llm_leaderboard_eval_bot/lm_eval_util.py\", line 145, in evaluate\n results = evaluator.simple_evaluate(\n ^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/lm-evaluation-harness-pt/lm_eval/utils.py\", line 419, in _wrapper\n return fn(*args, **kwargs)\n ^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/lm-evaluation-harness-pt/lm_eval/evaluator.py\", line 100, in simple_evaluate\n lm = lm_eval.api.registry.get_model(model).create_from_arg_string(\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/lm-evaluation-harness-pt/lm_eval/api/model.py\", line 134, in create_from_arg_string\n return cls(**args, **args2)\n ^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/lm-evaluation-harness-pt/lm_eval/models/huggingface.py\", line 297, in __init__\n self._create_model(\n File \"/workspace/repos/llm_leaderboard/lm-evaluation-harness-pt/lm_eval/models/huggingface.py\", line 608, in _create_model\n self._model = self.AUTO_MODEL_CLASS.from_pretrained(\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/transformers/src/transformers/models/auto/auto_factory.py\", line 563, in from_pretrained\n 
return model_class.from_pretrained(\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/transformers/src/transformers/modeling_utils.py\", line 3441, in from_pretrained\n resolved_archive_file, sharded_metadata = get_checkpoint_shard_files(\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/transformers/src/transformers/utils/hub.py\", line 1038, in get_checkpoint_shard_files\n cached_filename = cached_file(\n ^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/transformers/src/transformers/utils/hub.py\", line 398, in cached_file\n resolved_file = hf_hub_download(\n ^^^^^^^^^^^^^^^^\n File \"/root/miniconda3/envs/torch21/lib/python3.11/site-packages/huggingface_hub/utils/_validators.py\", line 119, in _inner_fn\n return fn(*args, **kwargs)\n ^^^^^^^^^^^^^^^^^^^\n File \"/root/miniconda3/envs/torch21/lib/python3.11/site-packages/huggingface_hub/file_download.py\", line 1492, in hf_hub_download\n http_get(\n File \"/root/miniconda3/envs/torch21/lib/python3.11/site-packages/huggingface_hub/file_download.py\", line 520, in http_get\n raise RuntimeError(\nRuntimeError: An error occurred while downloading using `hf_transfer`. Consider disabling HF_HUB_ENABLE_HF_TRANSFER for better error handling.\n"
  }
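
For context, the `error_msg` added above carries the standard workaround: disable `HF_HUB_ENABLE_HF_TRANSFER` so that `huggingface_hub` falls back to its pure-Python downloader, which handles transient CDN errors (like the HTTP 500 in the traceback) with clearer reporting and retries. A minimal sketch is below; the repo id and shard filename are taken from the traceback URL, and the snippet is illustrative only, not part of the eval bot's code.

```python
# Sketch of the workaround suggested by error_msg: turn off hf_transfer before
# huggingface_hub is imported, so downloads use the plain-requests code path.
import os

# The flag is read when huggingface_hub's constants module is imported,
# so it must be set before the import below.
os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "0"

from huggingface_hub import hf_hub_download

# Re-fetch the shard that failed in the traceback (names taken from the log).
path = hf_hub_download(
    repo_id="paloalma/ECE-TW3-JRGL-V1",
    filename="model-00051-of-00081.safetensors",
)
print(path)
```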