{ "_name_or_path": "./checkpoints/llavastablelm-2b-other", "architectures": [ "MoELLaVAStablelmForCausalLM" ], "auto_map": { "AutoConfig": "stabilityai/stablelm-2-1_6b--configuration_stablelm_epoch.StableLMEpochConfig", "AutoModelForCausalLM": "stabilityai/stablelm-2-1_6b--modeling_stablelm_epoch.StableLMEpochForCausalLM" }, "bos_token_id": 100257, "eos_token_id": 100257, "freeze_mm_mlp_adapter": false, "hidden_act": "silu", "hidden_size": 2048, "image_aspect_ratio": "pad", "image_projector_type": "mlp2x_gelu", "initializer_range": 0.02, "intermediate_size": 5632, "max_position_embeddings": 4096, "mm_hidden_size": 1024, "mm_image_tower": "openai/clip-vit-large-patch14-336", "mm_projector_lr": null, "mm_use_im_patch_token": false, "mm_use_im_start_end": false, "mm_video_tower": null, "mm_vision_select_feature": "patch", "mm_vision_select_layer": -2, "model_type": "moe_llava_stablelm", "moe": { "capacity_factor": 1.5, "ep_size": 1, "eval_capacity_factor": 2.0, "min_capacity": 0, "moe_enable": true, "moe_layers_idx": [ 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22 ], "moe_mode": "sparse", "num_experts": [ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4 ], "router_aux_loss_coef": 0.01, "top_k_experts": 2, "train_modules": [ "gate_proj", "up_proj", "down_proj", "wg" ], "use_residual": false }, "norm_eps": 1e-05, "num_attention_heads": 32, "num_heads": 32, "num_hidden_layers": 24, "num_key_value_heads": 32, "pad_token_id": 100280, "rope_pct": 0.25, "rope_theta": 10000, "rotary_scaling_factor": 1.0, "tie_word_embeddings": false, "tokenizer_padding_side": "right", "torch_dtype": "bfloat16", "transformers_version": "4.36.2", "tune_mm_mlp_adapter": false, "use_cache": true, "use_mm_proj": true, "use_qkv_bias": true, "video_global_proj": false, "video_projector_type": "linear", "video_spatial_proj": false, "video_temproal_proj": false, "vocab_size": 100352 }