{
  "module": "keras_hub.src.models.qwen3_moe.qwen3_moe_causal_lm",
  "class_name": "Qwen3MoeCausalLM",
  "config": {
    "backbone": {
      "module": "keras_hub.src.models.qwen3_moe.qwen3_moe_backbone",
      "class_name": "Qwen3MoeBackbone",
      "config": {
        "name": "qwen3_moe_backbone",
        "trainable": true,
        "dtype": {
          "module": "keras",
          "class_name": "DTypePolicy",
          "config": {
            "name": "float32"
          },
          "registered_name": null
        },
        "vocabulary_size": 151936,
        "num_layers": 48,
        "num_query_heads": 32,
        "head_dim": 128,
        "hidden_dim": 2048,
        "intermediate_dim": 6144,
        "moe_intermediate_dim": 768,
        "rope_max_wavelength": 10000000,
        "num_key_value_heads": 4,
        "rope_scaling_factor": 1.0,
        "layer_norm_epsilon": 1e-06,
        "dropout": 0,
        "tie_word_embeddings": false,
        "sliding_window_size": null,
        "num_experts": 128,
        "top_k": 8,
        "norm_top_k_prob": true,
        "decoder_sparse_step": 1,
        "mlp_only_layers": [],
        "router_aux_loss_coefficient": 0.001
      },
      "registered_name": "keras_hub>Qwen3MoeBackbone"
    },
    "preprocessor": {
      "module": "keras_hub.src.models.qwen3_moe.qwen3_moe_causal_lm_preprocessor",
      "class_name": "Qwen3MoeCausalLMPreprocessor",
      "config": {
        "name": "qwen3_moe_causal_lm_preprocessor_2",
        "trainable": true,
        "dtype": {
          "module": "keras",
          "class_name": "DTypePolicy",
          "config": {
            "name": "float32"
          },
          "registered_name": null
        },
        "tokenizer": {
          "module": "keras_hub.src.models.qwen3_moe.qwen3_moe_tokenizer",
          "class_name": "Qwen3MoeTokenizer",
          "config": {
            "name": "qwen3_moe_tokenizer",
            "trainable": true,
            "dtype": {
              "module": "keras",
              "class_name": "DTypePolicy",
              "config": {
                "name": "int32"
              },
              "registered_name": null
            },
            "config_file": "tokenizer.json",
            "sequence_length": null,
            "add_prefix_space": false,
            "unsplittable_tokens": [
              "<tool_response>",
              "</think>",
              "<|box_end|>",
              "</tool_response>",
              "<|fim_suffix|>",
              "<|quad_start|>",
              "<tool_call>",
              "<think>",
              "<|vision_pad|>",
              "<|file_sep|>",
              "<|box_start|>",
              "<|im_start|>",
              "<|endoftext|>",
              "</tool_call>",
              "<|object_ref_start|>",
              "<|repo_name|>",
              "<|vision_start|>",
              "<|vision_end|>",
              "<|object_ref_end|>",
              "<|quad_end|>",
              "<|fim_middle|>",
              "<|video_pad|>",
              "<|im_end|>",
              "<|image_pad|>",
              "<|fim_prefix|>",
              "<|fim_pad|>"
            ]
          },
          "registered_name": "keras_hub>Qwen3MoeTokenizer"
        },
        "config_file": "preprocessor.json",
        "sequence_length": 1024,
        "add_start_token": true,
        "add_end_token": true
      },
      "registered_name": "keras_hub>Qwen3MoeCausalLMPreprocessor"
    },
    "name": "qwen3_moe_causal_lm"
  },
  "registered_name": "keras_hub>Qwen3MoeCausalLM"
}