{ "_from_model_config": true, "_name_or_path": "openbmb/viscpmchat-bee-10b", "architectures": [ "VisCpmBeeForCausalLM" ], "auto_map": { "AutoConfig": "configuration_viscpmchatbee.VisCpmChatBeeConfig", "AutoModel": "modeling_cpmbee.VisCpmBeeForCausalLM", "AutoModelForCausalLM": "modeling_cpmbee.VisCpmBeeForCausalLM" }, "vocab_size": 86583, "hidden_size": 4096, "dim_ff" : 10240, "num_hidden_layers" : 48, "num_attention_heads": 32, "dim_head" : 128, "dropout_p" : 0.0, "position_bias_num_buckets" : 256, "position_bias_num_segment_buckets": 256, "position_bias_max_distance" : 2048, "vision_dim": 1024, "query_num": 64, "eps" : 1e-6, "half" : true, "model_type": "viscpmchatbee" }