{
    "model": "THUDM/glm-10b",
    "base_model": "",
    "revision": "main",
    "private": false,
    "precision": "float16",
    "params": 10.0,
    "architectures": "GLMModel",
    "weight_type": "Original",
    "status": "FAILED",
    "submitted_time": "2024-02-05T23:09:31Z",
    "model_type": "🟢 : pretrained",
    "source": "script",
    "job_id": -1,
    "job_start_time": null,
    "error_msg": "Failed to download and/or use the AutoModel class, trust_remote_code=True - Original Exception: Unrecognized configuration class <class 'transformers_modules.THUDM.glm-10b.696788d4f82ac96b90823555f547d1e754839ff4.configuration_glm.GLMConfig'> for this kind of AutoModel: AutoModelForCausalLM.\nModel type should be one of BartConfig, BertConfig, BertGenerationConfig, BigBirdConfig, BigBirdPegasusConfig, BioGptConfig, BlenderbotConfig, BlenderbotSmallConfig, BloomConfig, CamembertConfig, LlamaConfig, CodeGenConfig, CpmAntConfig, CTRLConfig, Data2VecTextConfig, ElectraConfig, ErnieConfig, FalconConfig, FuyuConfig, GitConfig, GPT2Config, GPT2Config, GPTBigCodeConfig, GPTNeoConfig, GPTNeoXConfig, GPTNeoXJapaneseConfig, GPTJConfig, LlamaConfig, MarianConfig, MBartConfig, MegaConfig, MegatronBertConfig, MistralConfig, MixtralConfig, MptConfig, MusicgenConfig, MvpConfig, OpenLlamaConfig, OpenAIGPTConfig, OPTConfig, PegasusConfig, PersimmonConfig, PhiConfig, PLBartConfig, ProphetNetConfig, QDQBertConfig, ReformerConfig, RemBertConfig, RobertaConfig, RobertaPreLayerNormConfig, RoCBertConfig, RoFormerConfig, RwkvConfig, Speech2Text2Config, TransfoXLConfig, TrOCRConfig, WhisperConfig, XGLMConfig, XLMConfig, XLMProphetNetConfig, XLMRobertaConfig, XLMRobertaXLConfig, XLNetConfig, XmodConfig, BaichuanConfig, BaichuanConfig, OrionConfig, DeepseekConfig.",
    "traceback": "Traceback (most recent call last):\n  File \"/workspace/repos/llm_leaderboard/llm_leaderboard_eval_bot/evaluate_llms.py\", line 184, in wait_download_and_run_request\n    raise Exception(f\"Failed to download and/or use the AutoModel class, trust_remote_code={TRUST_REMOTE_CODE} - Original Exception: {exception_msg}\")\nException: Failed to download and/or use the AutoModel class, trust_remote_code=True - Original Exception: Unrecognized configuration class <class 'transformers_modules.THUDM.glm-10b.696788d4f82ac96b90823555f547d1e754839ff4.configuration_glm.GLMConfig'> for this kind of AutoModel: AutoModelForCausalLM.\nModel type should be one of BartConfig, BertConfig, BertGenerationConfig, BigBirdConfig, BigBirdPegasusConfig, BioGptConfig, BlenderbotConfig, BlenderbotSmallConfig, BloomConfig, CamembertConfig, LlamaConfig, CodeGenConfig, CpmAntConfig, CTRLConfig, Data2VecTextConfig, ElectraConfig, ErnieConfig, FalconConfig, FuyuConfig, GitConfig, GPT2Config, GPT2Config, GPTBigCodeConfig, GPTNeoConfig, GPTNeoXConfig, GPTNeoXJapaneseConfig, GPTJConfig, LlamaConfig, MarianConfig, MBartConfig, MegaConfig, MegatronBertConfig, MistralConfig, MixtralConfig, MptConfig, MusicgenConfig, MvpConfig, OpenLlamaConfig, OpenAIGPTConfig, OPTConfig, PegasusConfig, PersimmonConfig, PhiConfig, PLBartConfig, ProphetNetConfig, QDQBertConfig, ReformerConfig, RemBertConfig, RobertaConfig, RobertaPreLayerNormConfig, RoCBertConfig, RoFormerConfig, RwkvConfig, Speech2Text2Config, TransfoXLConfig, TrOCRConfig, WhisperConfig, XGLMConfig, XLMConfig, XLMProphetNetConfig, XLMRobertaConfig, XLMRobertaXLConfig, XLNetConfig, XmodConfig, BaichuanConfig, BaichuanConfig, OrionConfig, DeepseekConfig.\n"
}
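
For context on the FAILED status recorded above: the harness loads submissions through AutoModelForCausalLM, whose config-to-model mapping does not include GLMConfig, so the load fails even with trust_remote_code=True. Below is a minimal sketch, not the leaderboard's actual code, that reproduces the error and shows the alternative loading path the THUDM/glm-10b repository's own remote code targets (AutoModelForSeq2SeqLM is an assumption based on the model card, not something stated in this request file).

```python
# Sketch only: reproduce the "Unrecognized configuration class ... GLMConfig" failure
# and show a loading path that matches GLM's remote code. Assumes a transformers
# version with trust_remote_code support and enough memory/disk for a 10B checkpoint.
from transformers import AutoTokenizer, AutoModelForCausalLM, AutoModelForSeq2SeqLM

repo = "THUDM/glm-10b"

# What the eval bot effectively attempts: GLMConfig is not in the
# AutoModelForCausalLM mapping, so this raises ValueError even when
# custom code is trusted.
try:
    AutoModelForCausalLM.from_pretrained(repo, trust_remote_code=True)
except ValueError as err:
    print(err)  # "Unrecognized configuration class ... for this kind of AutoModel"

# Assumed alternative (per the model card): GLM exposes itself as a
# seq2seq / blank-infilling model rather than a causal LM.
tokenizer = AutoTokenizer.from_pretrained(repo, trust_remote_code=True)
model = AutoModelForSeq2SeqLM.from_pretrained(repo, trust_remote_code=True)
```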