{
    "version": "0.9.0.dev2024022000",
    "pretrained_config": {
        "architecture": "LlamaForCausalLM",
        "dtype": "float16",
        "logits_dtype": "float32",
        "vocab_size": 32000,
        "max_position_embeddings": 4096,
        "hidden_size": 4096,
        "num_hidden_layers": 32,
        "num_attention_heads": 32,
        "num_key_value_heads": 32,
        "head_size": 128,
        "hidden_act": "silu",
        "intermediate_size": 11008,
        "norm_epsilon": 1e-05,
        "position_embedding_type": "rope_gpt_neox",
        "use_prompt_tuning": false,
        "use_parallel_embedding": true,
        "embedding_sharding_dim": 0,
        "share_embedding_table": false,
        "mapping": {
            "world_size": 2,
            "tp_size": 2,
            "pp_size": 1
        },
        "kv_dtype": "fp8",
        "max_lora_rank": 64,
        "producer": {
            "name": "ammo",
            "version": "0.7.3"
        },
        "mlp_bias": false,
        "attn_bias": false,
        "rotary_base": 10000.0,
        "rotary_scaling": null,
        "enable_pos_shift": false,
        "dense_context_fmha": false,
        "moe_num_experts": 0,
        "moe_top_k": 0,
        "moe_tp_mode": 2,
        "moe_normalization_mode": 1,
        "quantization": {
            "quant_algo": "FP8",
            "kv_cache_quant_algo": "FP8",
            "group_size": 128,
            "has_zero_point": false,
            "pre_quant_scale": false,
            "exclude_modules": null,
            "sq_use_plugin": false
        }
    },
    "build_config": {
        "max_input_len": 100,
        "max_output_len": 400,
        "max_batch_size": 4,
        "max_beam_width": 1,
        "max_num_tokens": 400,
        "max_prompt_embedding_table_size": 0,
        "gather_context_logits": false,
        "gather_generation_logits": false,
        "strongly_typed": true,
        "builder_opt": null,
        "profiling_verbosity": "layer_names_only",
        "enable_debug_output": false,
        "max_draft_len": 0,
        "plugin_config": {
            "bert_attention_plugin": "float16",
            "gpt_attention_plugin": "float16",
            "gemm_plugin": "float16",
            "smooth_quant_gemm_plugin": null,
            "identity_plugin": null,
            "layernorm_quantization_plugin": null,
            "rmsnorm_quantization_plugin": null,
            "nccl_plugin": "float16",
            "lookup_plugin": null,
            "lora_plugin": null,
            "weight_only_groupwise_quant_matmul_plugin": null,
            "weight_only_quant_matmul_plugin": null,
            "quantize_per_token_plugin": false,
            "quantize_tensor_plugin": false,
            "moe_plugin": "float16",
            "context_fmha": true,
            "context_fmha_fp32_acc": false,
            "paged_kv_cache": true,
            "remove_input_padding": true,
            "use_custom_all_reduce": true,
            "multi_block_mode": false,
            "enable_xqa": true,
            "attention_qk_half_accumulation": false,
            "tokens_per_block": 128,
            "use_paged_context_fmha": false,
            "use_context_fmha_for_generation": false
        }
    }
}