eduagarcia
committed on
Commit
•
d19ad55
1
Parent(s):
617ff72
Update status of automerger/YamshadowExperiment28-7B_eval_request_False_bfloat16_Original to FAILED
Browse files
automerger/YamshadowExperiment28-7B_eval_request_False_bfloat16_Original.json
CHANGED
@@ -8,10 +8,12 @@
|
|
8 |
"architectures": "MistralForCausalLM",
|
9 |
"weight_type": "Original",
|
10 |
"main_language": "English",
|
11 |
-
"status": "
|
12 |
"submitted_time": "2024-05-30T09:31:54Z",
|
13 |
"model_type": "🤝 : base merges and moerges",
|
14 |
"source": "leaderboard",
|
15 |
"job_id": 783,
|
16 |
-
"job_start_time": "2024-06-12T15-30-31.690593"
|
|
|
|
|
17 |
}
|
|
|
8 |
"architectures": "MistralForCausalLM",
|
9 |
"weight_type": "Original",
|
10 |
"main_language": "English",
|
11 |
+
"status": "FAILED",
|
12 |
"submitted_time": "2024-05-30T09:31:54Z",
|
13 |
"model_type": "🤝 : base merges and moerges",
|
14 |
"source": "leaderboard",
|
15 |
"job_id": 783,
|
16 |
+
"job_start_time": "2024-06-12T15-30-31.690593",
|
17 |
+
"error_msg": "Failed to import transformers.models.mistral.modeling_mistral because of the following error (look up to see its traceback):\ncannot import name 'SlidingWindowCache' from 'transformers.cache_utils' (/workspace/repos/llm_leaderboard/transformers/src/transformers/cache_utils.py)",
|
18 |
+
"traceback": "Traceback (most recent call last):\n File \"/workspace/repos/llm_leaderboard/transformers/src/transformers/utils/import_utils.py\", line 1515, in _get_module\n for value in values:\n ^^^^^^^^^^^^^^\n File \"/root/miniconda3/envs/torch21/lib/python3.11/importlib/__init__.py\", line 126, in import_module\n return _bootstrap._gcd_import(name[level:], package, level)\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"<frozen importlib._bootstrap>\", line 1204, in _gcd_import\n File \"<frozen importlib._bootstrap>\", line 1176, in _find_and_load\n File \"<frozen importlib._bootstrap>\", line 1147, in _find_and_load_unlocked\n File \"<frozen importlib._bootstrap>\", line 690, in _load_unlocked\n File \"<frozen importlib._bootstrap_external>\", line 940, in exec_module\n File \"<frozen importlib._bootstrap>\", line 241, in _call_with_frames_removed\n File \"/workspace/repos/llm_leaderboard/transformers/src/transformers/models/mistral/modeling_mistral.py\", line 33, in <module>\n from ...cache_utils import Cache, DynamicCache, SlidingWindowCache, StaticCache\nImportError: cannot import name 'SlidingWindowCache' from 'transformers.cache_utils' (/workspace/repos/llm_leaderboard/transformers/src/transformers/cache_utils.py)\n\nThe above exception was the direct cause of the following exception:\n\nTraceback (most recent call last):\n File \"/workspace/repos/llm_leaderboard/llm_leaderboard_eval_bot/evaluate_llms.py\", line 199, in wait_download_and_run_request\n run_request(\n File \"/workspace/repos/llm_leaderboard/llm_leaderboard_eval_bot/evaluate_llms.py\", line 71, in run_request\n results = run_eval_on_model(\n ^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/llm_leaderboard_eval_bot/run_eval.py\", line 60, in run_eval_on_model\n result = evaluate(\n ^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/llm_leaderboard_eval_bot/lm_eval_util.py\", line 145, in evaluate\n results = evaluator.simple_evaluate(\n 
^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/lm-evaluation-harness-pt/lm_eval/utils.py\", line 419, in _wrapper\n return fn(*args, **kwargs)\n ^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/lm-evaluation-harness-pt/lm_eval/evaluator.py\", line 100, in simple_evaluate\n lm = lm_eval.api.registry.get_model(model).create_from_arg_string(\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/lm-evaluation-harness-pt/lm_eval/api/model.py\", line 134, in create_from_arg_string\n return cls(**args, **args2)\n ^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/lm-evaluation-harness-pt/lm_eval/models/huggingface.py\", line 297, in __init__\n self._create_model(\n File \"/workspace/repos/llm_leaderboard/lm-evaluation-harness-pt/lm_eval/models/huggingface.py\", line 608, in _create_model\n self._model = self.AUTO_MODEL_CLASS.from_pretrained(\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/transformers/src/transformers/models/auto/auto_factory.py\", line 562, in from_pretrained\n elif type(config) in cls._model_mapping.keys():\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/transformers/src/transformers/models/auto/auto_factory.py\", line 383, in _get_model_class\n def _get_model_class(config, model_mapping):\n ^^^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/transformers/src/transformers/models/auto/auto_factory.py\", line 734, in __getitem__\n model_name = self._model_mapping[model_type]\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/transformers/src/transformers/models/auto/auto_factory.py\", line 748, in _load_attr_from_module\n self._modules[module_name] = importlib.import_module(f\".{module_name}\", \"transformers.models\")\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File 
\"/workspace/repos/llm_leaderboard/transformers/src/transformers/models/auto/auto_factory.py\", line 692, in getattribute_from_module\n return tuple(getattribute_from_module(module, a) for a in attr)\n ^^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/transformers/src/transformers/utils/import_utils.py\", line 1505, in __getattr__\n Module class that surfaces all objects but only performs associated imports when the objects are requested.\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/transformers/src/transformers/utils/import_utils.py\", line 1517, in _get_module\n # Needed for autocompletion in an IDE\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\nRuntimeError: Failed to import transformers.models.mistral.modeling_mistral because of the following error (look up to see its traceback):\ncannot import name 'SlidingWindowCache' from 'transformers.cache_utils' (/workspace/repos/llm_leaderboard/transformers/src/transformers/cache_utils.py)\n"
|
19 |
}
|