{ "model": "Kukedlc/NeuralSynthesis-7b-v0.4-slerp", "base_model": "", "revision": "main", "private": false, "precision": "bfloat16", "params": 7.242, "architectures": "MistralForCausalLM", "weight_type": "Original", "main_language": "English", "status": "FAILED", "submitted_time": "2024-05-30T09:33:13Z", "model_type": "🤝 : base merges and moerges", "source": "leaderboard", "job_id": 784, "job_start_time": "2024-06-12T15-30-54.781938", "error_msg": "Failed to import transformers.models.mistral.modeling_mistral because of the following error (look up to see its traceback):\ncannot import name 'SlidingWindowCache' from 'transformers.cache_utils' (/workspace/repos/llm_leaderboard/transformers/src/transformers/cache_utils.py)", "traceback": "Traceback (most recent call last):\n File \"/workspace/repos/llm_leaderboard/transformers/src/transformers/utils/import_utils.py\", line 1515, in _get_module\n for value in values:\n ^^^^^^^^^^^^^^\n File \"/root/miniconda3/envs/torch21/lib/python3.11/importlib/__init__.py\", line 126, in import_module\n return _bootstrap._gcd_import(name[level:], package, level)\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"\", line 1204, in _gcd_import\n File \"\", line 1176, in _find_and_load\n File \"\", line 1147, in _find_and_load_unlocked\n File \"\", line 690, in _load_unlocked\n File \"\", line 940, in exec_module\n File \"\", line 241, in _call_with_frames_removed\n File \"/workspace/repos/llm_leaderboard/transformers/src/transformers/models/mistral/modeling_mistral.py\", line 33, in \n from ...cache_utils import Cache, DynamicCache, SlidingWindowCache, StaticCache\nImportError: cannot import name 'SlidingWindowCache' from 'transformers.cache_utils' (/workspace/repos/llm_leaderboard/transformers/src/transformers/cache_utils.py)\n\nThe above exception was the direct cause of the following exception:\n\nTraceback (most recent call last):\n File \"/workspace/repos/llm_leaderboard/llm_leaderboard_eval_bot/evaluate_llms.py\", line 199, in wait_download_and_run_request\n run_request(\n File \"/workspace/repos/llm_leaderboard/llm_leaderboard_eval_bot/evaluate_llms.py\", line 71, in run_request\n results = run_eval_on_model(\n ^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/llm_leaderboard_eval_bot/run_eval.py\", line 60, in run_eval_on_model\n result = evaluate(\n ^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/llm_leaderboard_eval_bot/lm_eval_util.py\", line 145, in evaluate\n results = evaluator.simple_evaluate(\n ^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/lm-evaluation-harness-pt/lm_eval/utils.py\", line 419, in _wrapper\n return fn(*args, **kwargs)\n ^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/lm-evaluation-harness-pt/lm_eval/evaluator.py\", line 100, in simple_evaluate\n lm = lm_eval.api.registry.get_model(model).create_from_arg_string(\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/lm-evaluation-harness-pt/lm_eval/api/model.py\", line 134, in create_from_arg_string\n return cls(**args, **args2)\n ^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/lm-evaluation-harness-pt/lm_eval/models/huggingface.py\", line 297, in __init__\n self._create_model(\n File \"/workspace/repos/llm_leaderboard/lm-evaluation-harness-pt/lm_eval/models/huggingface.py\", line 608, in _create_model\n self._model = self.AUTO_MODEL_CLASS.from_pretrained(\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File 
\"/workspace/repos/llm_leaderboard/transformers/src/transformers/models/auto/auto_factory.py\", line 562, in from_pretrained\n elif type(config) in cls._model_mapping.keys():\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/transformers/src/transformers/models/auto/auto_factory.py\", line 383, in _get_model_class\n def _get_model_class(config, model_mapping):\n ^^^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/transformers/src/transformers/models/auto/auto_factory.py\", line 734, in __getitem__\n model_name = self._model_mapping[model_type]\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/transformers/src/transformers/models/auto/auto_factory.py\", line 748, in _load_attr_from_module\n self._modules[module_name] = importlib.import_module(f\".{module_name}\", \"transformers.models\")\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/transformers/src/transformers/models/auto/auto_factory.py\", line 692, in getattribute_from_module\n return tuple(getattribute_from_module(module, a) for a in attr)\n ^^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/transformers/src/transformers/utils/import_utils.py\", line 1505, in __getattr__\n Module class that surfaces all objects but only performs associated imports when the objects are requested.\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File \"/workspace/repos/llm_leaderboard/transformers/src/transformers/utils/import_utils.py\", line 1517, in _get_module\n # Needed for autocompletion in an IDE\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\nRuntimeError: Failed to import transformers.models.mistral.modeling_mistral because of the following error (look up to see its traceback):\ncannot import name 'SlidingWindowCache' from 'transformers.cache_utils' (/workspace/repos/llm_leaderboard/transformers/src/transformers/cache_utils.py)\n" }