{
  "_name_or_path": "mosaicml/mosaic-llama-redpajama-final-candidate",
  "alibi": true,
  "alibi_bias_max": 8,
  "architectures": [
    "MosaicGPT"
  ],
  "attn_clip_qkv": null,
  "attn_impl": "torch",
  "attn_pdrop": 0,
  "attn_qk_ln": true,
  "attn_uses_sequence_id": false,
  "auto_map": {
    "AutoConfig": "configuration_mosaic_gpt.MosaicGPTConfig",
    "AutoModelForCausalLM": "mosaic_gpt.MosaicGPT"
  },
  "d_model": 2048,
  "emb_init_std": null,
  "emb_init_uniform_lim": null,
  "emb_pdrop": 0,
  "embedding_fraction": 1.0,
  "fan_mode": "fan_in",
  "init_device": "cpu",
  "init_div_is_residual": true,
  "init_gain": 0,
  "init_nonlinearity": "relu",
  "init_std": 0.02,
  "logit_scale": null,
  "low_precision_layernorm": true,
  "max_seq_len": 2048,
  "mlp_ratio": 4,
  "model_type": "mosaic_gpt",
  "n_heads": 16,
  "n_layers": 24,
  "no_bias": true,
  "param_init_fn": "kaiming_normal_",
  "prefix_lm": false,
  "resid_pdrop": 0,
  "softmax_scale": null,
  "tokenizer_name": "EleutherAI/gpt-neox-20b",
  "torch_dtype": "float32",
  "transformers_version": "4.27.4",
  "use_cache": false,
  "verbose": 0,
  "vocab_size": 50432
}
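A minimal sketch of how a config like this is typically consumed, assuming standard transformers usage: because "auto_map" points to custom MosaicGPT classes shipped with the checkpoint, loading requires trust_remote_code=True, and "tokenizer_name" indicates the EleutherAI/gpt-neox-20b tokenizer. The repo id below is taken from "_name_or_path" and is only a placeholder; the published checkpoint may live under a different repo id.

# Loading sketch (assumptions: repo id from "_name_or_path" is reachable; custom
# MosaicGPT code is present in the repo, as implied by "auto_map").
import torch
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

model_id = "mosaicml/mosaic-llama-redpajama-final-candidate"  # placeholder repo id

# "auto_map" routes AutoConfig / AutoModelForCausalLM to custom code in the repo,
# so trust_remote_code=True is required.
config = AutoConfig.from_pretrained(model_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    config=config,
    torch_dtype=torch.float32,  # matches "torch_dtype" in the config
    trust_remote_code=True,
)

# "tokenizer_name" says the model uses the GPT-NeoX-20B tokenizer
# (vocab_size 50432 matches its padded vocabulary).
tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neox-20b")

inputs = tokenizer("MosaicGPT is", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))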