my-model-repo3 / config.json
{
  "_name_or_path": "LLM360/CrystalChat",
  "activation_function": "swiglu",
  "architectures": [
    "LlavaCrystalForCausalLM"
  ],
  "attn_pdrop": 0.0,
  "auto_map": {
    "AutoConfig": "LLM360/CrystalChat--configuration_crystalcoder.CrystalCoderConfig",
    "AutoModel": "LLM360/CrystalChat--modeling_crystalcoder.CrystalCoderModel",
    "AutoModelForCausalLM": "LLM360/CrystalChat--modeling_crystalcoder.CrystalCoderLMHeadModel"
  },
  "bos_token_id": 1,
  "embd_pdrop": 0.0,
  "eos_token_id": 2,
  "freeze_mm_mlp_adapter": false,
  "image_aspect_ratio": "pad",
  "initializer_range": 0.02,
  "layer_norm_epsilon": 1e-05,
  "mm_hidden_size": 1024,
  "mm_projector_lr": null,
  "mm_projector_type": "mlp2x_gelu",
  "mm_use_im_patch_token": false,
  "mm_use_im_start_end": false,
  "mm_vision_select_feature": "patch",
  "mm_vision_select_layer": -2,
  "mm_vision_tower": "openai/clip-vit-large-patch14-336",
  "model_type": "llava_crystal",
  "mup_embeddings_scale": 14.6,
  "mup_output_alpha": 2.22,
  "mup_scale_qk_dot_by_d": true,
  "mup_width_scale": 0.0625,
  "n_embd": 4096,
  "n_head": 32,
  "n_inner": 10922,
  "n_layer": 32,
  "n_positions": 2048,
  "position_embedding_type": "rotary",
  "reorder_and_upcast_attn": false,
  "resid_pdrop": 0.0,
  "rotary_dim": 32,
  "scale_attn_by_inverse_layer_idx": false,
  "scale_attn_weights": true,
  "tokenizer_model_max_length": 2048,
  "tokenizer_padding_side": "right",
  "torch_dtype": "bfloat16",
  "transformers_version": "4.31.0",
  "tune_mm_mlp_adapter": false,
  "use_cache": true,
  "use_mm_proj": true,
  "vocab_size": 32032
}
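
A config like this is normally consumed through the transformers AutoConfig API. The sketch below is a minimal, hedged example: it assumes the repository id is my-model-repo3 (the path shown above) and that the custom CrystalCoder code referenced in auto_map is reachable on the Hub; the LlavaCrystalForCausalLM architecture itself may additionally require the accompanying LLaVA-style modeling code rather than stock transformers.

# Minimal loading sketch (assumptions: repository id "my-model-repo3" from the path
# above; the remote code referenced in "auto_map" is available, hence
# trust_remote_code=True).
from transformers import AutoConfig

config = AutoConfig.from_pretrained("my-model-repo3", trust_remote_code=True)

# These fields mirror the JSON above.
print(config.model_type)                              # expected: "llava_crystal"
print(config.n_layer, config.n_head, config.n_embd)   # expected: 32 32 4096
print(config.torch_dtype)                             # expected: torch.bfloat16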