{
  "model_type": "ChatGLM2Model",
  "model_config": {
    "hidden_size": 4096,
    "inner_hidden_size": 13696,
    "head_hidden_size": 128,
    "num_multi_query_groups": 2,
    "num_attention_heads": 32,
    "num_layers": 28,
    "vocab_size": 65024,
    "dropout_rate": 0.0,
    "layernorm_epsilon": 1e-05,
    "max_sequence_length": 8192
  },
  "quant_type": "int8",
  "weight_files": [
    "model_weights_0.safetensors",
    "model_weights_1.safetensors",
    "model_weights_2.safetensors"
  ],
  "tokenizer_file": "sentencepiece.model",
  "torch_dtype": "float16"
}