{
"_name_or_path": "Qwen/QwQ-32B-Preview",
"architectures": [
"Qwen2ForCausalLM"
],
"attention_dropout": 0.0,
"bos_token_id": 151643,
"eos_token_id": 151645,
"hidden_act": "silu",
"hidden_size": 5120,
"initializer_range": 0.02,
"intermediate_size": 27648,
"max_position_embeddings": 32768,
"max_window_layers": 64,
"model_type": "qwen2",
"num_attention_heads": 40,
"num_hidden_layers": 64,
"num_key_value_heads": 8,
"quantization_config": {
"amp": true,
"autoround_version": "0.4.1",
"batch_size": 1,
"bits": 4,
"damp_percent": 0.01,
"data_type": "int",
"desc_act": false,
"enable_minmax_tuning": true,
"enable_norm_bias_tuning": false,
"enable_quanted_input": true,
"gradient_accumulate_steps": 1,
"group_size": 128,
"iters": 512,
"low_gpu_mem_usage": true,
"lr": 0.001953125,
"minmax_lr": 0.001953125,
"nsamples": 128,
"quant_method": "gptq",
"scale_dtype": "torch.float16",
"seqlen": 2048,
"sym": true,
"to_quant_block_names": [
[
"model.layers.0",
"model.layers.1",
"model.layers.2",
"model.layers.3",
"model.layers.4",
"model.layers.5",
"model.layers.6",
"model.layers.7",
"model.layers.8",
"model.layers.9",
"model.layers.10",
"model.layers.11",
"model.layers.12",
"model.layers.13",
"model.layers.14",
"model.layers.15",
"model.layers.16",
"model.layers.17",
"model.layers.18",
"model.layers.19",
"model.layers.20",
"model.layers.21",
"model.layers.22",
"model.layers.23",
"model.layers.24",
"model.layers.25",
"model.layers.26",
"model.layers.27",
"model.layers.28",
"model.layers.29",
"model.layers.30",
"model.layers.31",
"model.layers.32",
"model.layers.33",
"model.layers.34",
"model.layers.35",
"model.layers.36",
"model.layers.37",
"model.layers.38",
"model.layers.39",
"model.layers.40",
"model.layers.41",
"model.layers.42",
"model.layers.43",
"model.layers.44",
"model.layers.45",
"model.layers.46",
"model.layers.47",
"model.layers.48",
"model.layers.49",
"model.layers.50",
"model.layers.51",
"model.layers.52",
"model.layers.53",
"model.layers.54",
"model.layers.55",
"model.layers.56",
"model.layers.57",
"model.layers.58",
"model.layers.59",
"model.layers.60",
"model.layers.61",
"model.layers.62",
"model.layers.63"
]
],
"true_sequential": false
},
"rms_norm_eps": 1e-05,
"rope_scaling": null,
"rope_theta": 1000000.0,
"sliding_window": null,
"tie_word_embeddings": false,
"torch_dtype": "float16",
"transformers_version": "4.46.3",
"use_cache": true,
"use_sliding_window": false,
"vocab_size": 152064
}
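
This config describes a 4-bit, group-size-128, symmetric GPTQ-format checkpoint of Qwen/QwQ-32B-Preview produced with AutoRound 0.4.1. Below is a minimal loading sketch with transformers; the repository id is a placeholder (assumption, not taken from this file), and a GPTQ-capable backend plus a CUDA GPU are assumed to be installed.

# Minimal sketch: loading this 4-bit GPTQ-format checkpoint with transformers.
# "Satwik11/QwQ-32B-Preview-AutoRound-GPTQ-4bit" is a placeholder repo id (assumption);
# substitute the actual repository name. A GPTQ backend (e.g. auto-gptq or gptqmodel)
# must be installed for transformers to dequantize the int4 weights.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "Satwik11/QwQ-32B-Preview-AutoRound-GPTQ-4bit"  # placeholder

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map="auto",   # spread the 64 quantized decoder layers across available GPUs
    torch_dtype="auto",  # the config specifies torch_dtype: float16
)

# Simple generation example using the chat template.
messages = [{"role": "user", "content": "Briefly explain what 4-bit GPTQ quantization does."}]
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

outputs = model.generate(inputs, max_new_tokens=256)
print(tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True))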