{ "n_embd": 4096, "tie_word_embeddings": true, "n_positions": 2048, "vocab_size": 50257, "n_layer": 32, "resid_pdrop": 0.0, "layer_norm_epsilon": 1e-05, "n_head": 32, "scale_attn_weights": true, "attn_pdrop": 0.0, "n_inner": 16384, "activation_function": "gelu", "embd_pdrop": 0.0 }