blaze-koneski / config.json
{
  "activation_function": "gelu_new",
  "architectures": [
    "GPT2LMHeadModel"
  ],
  "attn_pdrop": 0.1,
  "bos_token_id": 0,
  "embd_pdrop": 0.1,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "initializer_range": 0.02,
  "layer_norm_epsilon": 1e-05,
  "model_type": "gpt2",
  "n_ctx": 1024,
  "n_embd": 768,
  "n_head": 12,
  "n_inner": null,
  "n_layer": 12,
  "n_positions": 1024,
  "resid_pdrop": 0.1,
  "scale_attn_weights": true,
  "summary_activation": null,
  "summary_first_dropout": 0.1,
  "summary_proj_to_labels": true,
  "summary_type": "cls_index",
  "summary_use_proj": true,
  "torch_dtype": "float32",
  "transformers_version": "4.9.1",
  "use_cache": true,
  "vocab_size": 52003
}
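
For reference, a minimal sketch of constructing a model with these hyperparameters via Hugging Face transformers. The full repo id is not shown above (only "blaze-koneski / config.json"), so the from_pretrained path is left as a placeholder; the constructor values simply mirror the config file.

# Minimal sketch: rebuilding this config with Hugging Face transformers.
from transformers import GPT2Config, GPT2LMHeadModel

# Mirror the hyperparameters from config.json above.
config = GPT2Config(
    vocab_size=52003,
    n_positions=1024,
    n_embd=768,
    n_layer=12,
    n_head=12,
    n_inner=None,              # null in the config: MLP width defaults to 4 * n_embd
    activation_function="gelu_new",
    resid_pdrop=0.1,
    embd_pdrop=0.1,
    attn_pdrop=0.1,
    layer_norm_epsilon=1e-05,
    initializer_range=0.02,
    bos_token_id=0,
    eos_token_id=2,
    use_cache=True,
)

# Randomly initialized weights with the shapes this config implies,
# i.e. a 12-layer, 768-dim GPT-2 over a 52003-token vocabulary.
model = GPT2LMHeadModel(config)

# To load the trained checkpoint instead, use the actual Hub repo id
# (elided here, so left as a placeholder):
# model = GPT2LMHeadModel.from_pretrained("<owner>/<repo>")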