{ "_name_or_path": "cerebras/btlm-3b-8k-base", "activation_function": "swiglu", "architectures": [ "GPTJXForCausalLM" ], "attn_pdrop": 0.0, "auto_map": { "AutoConfig": "configuration_gptjx.GPTJXConfig", "AutoModel": "modeling_gptjx.GPTJXModel", "AutoModelForCausalLM": "modeling_gptjx.GPTJXForCausalLM" }, "bos_token_id": 50256, "embd_pdrop": 0.0, "mup_embeddings_scale": 14.6, "eos_token_id": 50256, "initializer_range": 0.073, "layer_norm_epsilon": 1e-05, "model_type": "gpt_jx", "n_embd": 2560, "n_head": 32, "n_inner": 6826, "n_layer": 32, "n_positions": 8192, "mup_output_alpha": 2.2200000000000003, "position_embedding_type": "alibi", "reorder_and_upcast_attn": false, "resid_pdrop": 0.0, "scale_attn_by_inverse_layer_idx": false, "scale_attn_weights": true, "mup_scale_qk_dot_by_d": true, "torch_dtype": "bfloat16", "transformers_version": "4.30.0", "use_cache": true, "vocab_size": 50257, "mup_width_scale": 0.1 }