{ "_from_model_config": true, "_name_or_path": "cpm-ant-10b", "architectures": [ "CPMAntForCausalLM" ], "bos_token_id": 6, "dim_ff": 10240, "dim_head": 128, "hidden_size": 4096, "dropout_p": 0.0, "eos_token_id": 7, "eps": 1e-06, "is_decoder": true, "mask_modules": null, "max_new_tokens": 50, "model_type": "cpmant", "num_beams": 3, "num_attention_heads": 32, "num_hidden_layers": 48, "pad_token_id": 0, "position_bias_max_distance": 2048, "position_bias_num_buckets": 512, "prompt_length": 32, "prompt_types": 32, "repetition_penalty": 1.2, "segment_types": 32, "tokenizer_class": "CPMAntTokenizer", "torch_dtype": "float32", "transformers_version": "4.26.0.dev0", "use_cache": true, "vocab_size": 30720 }