{
  "architectures": [
    "Glm4MoeLiteForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 0,
  "dtype": "bfloat16",
  "eos_token_id": [
    154820,
    154827,
    154829
  ],
  "first_k_dense_replace": 1,
  "head_dim": 64,
  "hidden_act": "silu",
  "hidden_size": 2048,
  "initializer_range": 0.02,
  "intermediate_size": 10240,
  "kv_lora_rank": 512,
  "max_position_embeddings": 202752,
  "mlp_layer_types": [
    "dense",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse",
    "sparse"
  ],
  "model_type": "glm4_moe_lite",
  "moe_intermediate_size": 1536,
  "n_group": 1,
  "n_routed_experts": 64,
  "n_shared_experts": 1,
  "norm_topk_prob": true,
  "num_attention_heads": 20,
  "num_experts_per_tok": 4,
  "num_hidden_layers": 47,
  "num_key_value_heads": 20,
  "num_nextn_predict_layers": 1,
  "pad_token_id": 154820,
  "partial_rotary_factor": 1.0,
  "pretraining_tp": 1,
  "q_lora_rank": 768,
  "qk_head_dim": 256,
  "qk_nope_head_dim": 192,
  "qk_rope_head_dim": 64,
  "rms_norm_eps": 1e-05,
  "rope_interleave": true,
  "rope_parameters": {
    "partial_rotary_factor": 1.0,
    "rope_theta": 1000000,
    "rope_type": "default"
  },
  "routed_scaling_factor": 1.8,
  "tie_word_embeddings": false,
  "topk_group": 1,
  "topk_method": "noaux_tc",
  "transformers_version": "5.0.1.dev0",
  "use_cache": true,
  "v_head_dim": 256,
  "vocab_size": 154880,
  "quantization_config": {
    "config_groups": {
      "group_0": {
        "input_activations": {
          "dynamic": false,
          "num_bits": 8,
          "type": "float"
        },
        "weights": {
          "dynamic": false,
          "num_bits": 8,
          "type": "float"
        },
        "targets": [
          "Linear"
        ]
      }
    },
    "ignore": [
      "lm_head",
      "model.layers.0.mlp.gate_proj",
      "model.layers.0.self_attn*",
      "model.layers.1.mlp.shared_experts.gate_proj",
      "model.layers.1.self_attn*",
      "model.layers.10.mlp.shared_experts.gate_proj",
      "model.layers.10.self_attn*",
      "model.layers.11.mlp.shared_experts.gate_proj",
      "model.layers.11.self_attn*",
      "model.layers.12.mlp.shared_experts.gate_proj",
      "model.layers.12.self_attn*",
      "model.layers.13.mlp.shared_experts.gate_proj",
      "model.layers.13.self_attn*",
      "model.layers.14.mlp.shared_experts.gate_proj",
      "model.layers.14.self_attn*",
      "model.layers.15.mlp.shared_experts.gate_proj",
      "model.layers.15.self_attn*",
      "model.layers.16.mlp.shared_experts.gate_proj",
      "model.layers.16.self_attn*",
      "model.layers.17.mlp.shared_experts.gate_proj",
      "model.layers.17.self_attn*",
      "model.layers.18.mlp.shared_experts.gate_proj",
      "model.layers.18.self_attn*",
      "model.layers.19.mlp.shared_experts.gate_proj",
      "model.layers.19.self_attn*",
      "model.layers.2.mlp.shared_experts.gate_proj",
      "model.layers.2.self_attn*",
      "model.layers.20.mlp.shared_experts.gate_proj",
      "model.layers.20.self_attn*",
      "model.layers.21.mlp.shared_experts.gate_proj",
      "model.layers.21.self_attn*",
      "model.layers.22.mlp.shared_experts.gate_proj",
      "model.layers.22.self_attn*",
      "model.layers.23.mlp.shared_experts.gate_proj",
      "model.layers.23.self_attn*",
      "model.layers.24.mlp.shared_experts.gate_proj",
      "model.layers.24.self_attn*",
      "model.layers.25.mlp.shared_experts.gate_proj",
      "model.layers.25.self_attn*",
      "model.layers.26.mlp.shared_experts.gate_proj",
      "model.layers.26.self_attn*",
      "model.layers.27.mlp.shared_experts.gate_proj",
      "model.layers.27.self_attn*",
      "model.layers.28.mlp.shared_experts.gate_proj",
      "model.layers.28.self_attn*",
      "model.layers.29.mlp.shared_experts.gate_proj",
      "model.layers.29.self_attn*",
      "model.layers.3.mlp.shared_experts.gate_proj",
      "model.layers.3.self_attn*",
      "model.layers.30.mlp.shared_experts.gate_proj",
      "model.layers.30.self_attn*",
      "model.layers.31.mlp.shared_experts.gate_proj",
      "model.layers.31.self_attn*",
      "model.layers.32.mlp.shared_experts.gate_proj",
      "model.layers.32.self_attn*",
      "model.layers.33.mlp.shared_experts.gate_proj",
      "model.layers.33.self_attn*",
      "model.layers.34.mlp.shared_experts.gate_proj",
      "model.layers.34.self_attn*",
      "model.layers.35.mlp.shared_experts.gate_proj",
      "model.layers.35.self_attn*",
      "model.layers.36.mlp.shared_experts.gate_proj",
      "model.layers.36.self_attn*",
      "model.layers.37.mlp.shared_experts.gate_proj",
      "model.layers.37.self_attn*",
      "model.layers.38.mlp.shared_experts.gate_proj",
      "model.layers.38.self_attn*",
      "model.layers.39.mlp.shared_experts.gate_proj",
      "model.layers.39.self_attn*",
      "model.layers.4.mlp.shared_experts.gate_proj",
      "model.layers.4.self_attn*",
      "model.layers.40.mlp.shared_experts.gate_proj",
      "model.layers.40.self_attn*",
      "model.layers.41.mlp.shared_experts.gate_proj",
      "model.layers.41.self_attn*",
      "model.layers.42.mlp.shared_experts.gate_proj",
      "model.layers.42.self_attn*",
      "model.layers.43.mlp.shared_experts.gate_proj",
      "model.layers.43.self_attn*",
      "model.layers.44.mlp.shared_experts.gate_proj",
      "model.layers.44.self_attn*",
      "model.layers.45.mlp.shared_experts.gate_proj",
      "model.layers.45.self_attn*",
      "model.layers.46.mlp.shared_experts.gate_proj",
      "model.layers.46.self_attn*",
      "model.layers.5.mlp.shared_experts.gate_proj",
      "model.layers.5.self_attn*",
      "model.layers.6.mlp.shared_experts.gate_proj",
      "model.layers.6.self_attn*",
      "model.layers.7.mlp.shared_experts.gate_proj",
      "model.layers.7.self_attn*",
      "model.layers.8.mlp.shared_experts.gate_proj",
      "model.layers.8.self_attn*",
      "model.layers.9.mlp.shared_experts.gate_proj",
      "model.layers.9.self_attn*"
    ],
    "quant_algo": "FP8",
    "producer": {
      "name": "modelopt",
      "version": "0.37.0"
    },
    "quant_method": "modelopt"
  }
}
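A minimal sketch of how the dimensions in this config fit together, using only the Python standard library (the local filename `config.json` is an assumption for illustration; it is not taken from the file above):

```python
import json

# Load the config shown above (the path is an assumed local filename).
with open("config.json") as f:
    cfg = json.load(f)

# One MLP type per transformer layer: with first_k_dense_replace = 1,
# the first layer is dense and the remaining 46 of 47 layers are sparse MoE.
assert len(cfg["mlp_layer_types"]) == cfg["num_hidden_layers"]
k = cfg["first_k_dense_replace"]
assert cfg["mlp_layer_types"][:k] == ["dense"] * k

# MLA-style attention: the query/key head dim is the sum of its
# non-rotary and rotary parts (192 + 64 = 256).
assert cfg["qk_head_dim"] == cfg["qk_nope_head_dim"] + cfg["qk_rope_head_dim"]

# MoE routing: each token activates 4 of 64 routed experts plus 1 shared expert.
print(f'{cfg["num_experts_per_tok"]} of {cfg["n_routed_experts"]} routed experts per token, '
      f'{cfg["n_shared_experts"]} shared')

# FP8 (modelopt) quantization targets all Linear layers except the patterns
# listed under quantization_config.ignore (lm_head, attention, and gate projections).
print(len(cfg["quantization_config"]["ignore"]), "ignored module patterns")
```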