Large90 / config.json
{
"_name_or_path": "gpt2-large",
"activation_function": "gelu_new",
"architectures": [
"GPT2LMHeadModel"
],
"attn_pdrop": 0.1,
"bos_token_id": 50256,
"embd_pdrop": 0.1,
"eos_token_id": 50256,
"initializer_range": 0.02,
"layer_norm_epsilon": 1e-05,
"model_type": "gpt2",
"n_ctx": 1024,
"n_embd": 1280,
"n_head": 20,
"n_inner": null,
"n_layer": 36,
"n_positions": 1024,
"output_attentions": true,
"pruned_heads": {
"8": [
1,
14
],
"10": [
1,
15
],
"11": [
19,
12,
13
],
"12": [
18,
3,
13
],
"13": [
4,
15,
6,
7
],
"15": [
1,
4,
17
],
"16": [
0,
2,
6,
9,
15
],
"17": [
16,
17,
4,
6,
11,
13
],
"18": [
9,
19,
4,
7
],
"19": [
17,
2,
18,
4,
8,
11
],
"20": [
0,
1,
4,
7,
10,
11,
15,
16,
17,
18,
19
],
"21": [
17,
19,
4,
5,
8,
12,
14,
15
],
"22": [
0,
18,
3,
4,
19,
13,
14
],
"23": [
0,
3,
5,
7,
8,
12,
13,
15
],
"24": [
1,
5,
6,
8,
11,
13,
14,
15,
16,
18,
19
],
"25": [
0,
3,
5,
6,
7,
10,
13,
14,
15,
16,
18,
19
],
"26": [
0,
1,
3,
6,
8,
10,
13,
14,
15,
16,
17
],
"27": [
0,
1,
2,
6,
7,
13,
14,
16,
17
],
"28": [
0,
1,
2,
3,
4,
6,
7,
8,
9,
10,
12,
14,
16,
17,
19
],
"29": [
0,
1,
2,
3,
5,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19
],
"30": [
0,
1,
2,
3,
4,
7,
8,
9,
10,
11,
12,
14,
15,
16,
17,
18,
19
],
"31": [
0,
1,
2,
4,
5,
6,
7,
8,
9,
10,
11,
12,
14,
15,
16,
17,
18,
19
],
"32": [
0,
6,
7,
8,
9,
10,
12,
13,
14,
15,
16,
17,
19
],
"33": [
0,
1,
2,
3,
4,
6,
9,
10,
12,
13,
14,
15,
17,
18,
19
],
"34": [
18,
7
]
},
"reorder_and_upcast_attn": false,
"resid_pdrop": 0.1,
"scale_attn_by_inverse_layer_idx": false,
"scale_attn_weights": true,
"summary_activation": null,
"summary_first_dropout": 0.1,
"summary_proj_to_labels": true,
"summary_type": "cls_index",
"summary_use_proj": true,
"task_specific_params": {
"text-generation": {
"do_sample": true,
"max_length": 50
}
},
"torch_dtype": "float32",
"transformers_version": "4.36.2",
"use_cache": true,
"vocab_size": 50257
}
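
For reference, a minimal sketch of how this config behaves when loaded with the `transformers` library. The local path `config.json` and the layer-29 spot check are assumptions for illustration; only the architecture is built here, with random weights, since the repo's weight files are not shown on this page.

```python
# A minimal sketch, assuming this file is saved locally as "config.json".
# It shows how the pruned_heads map is applied when the model is built.
from transformers import GPT2Config, GPT2LMHeadModel

config = GPT2Config.from_json_file("config.json")

# Tally the pruned heads against the full 36-layer x 20-head grid.
n_pruned = sum(len(heads) for heads in config.pruned_heads.values())
print(f"pruned: {n_pruned} of {config.n_layer * config.n_head} heads")
# -> pruned: 213 of 720 heads

# Instantiating a model from this config calls prune_heads(...) during
# post-init, so the affected GPT2Attention blocks end up with narrower
# projections rather than masked-out heads.
model = GPT2LMHeadModel(config)

# Spot check: layer 29 lists 18 of its 20 heads as pruned, so 2 remain.
print(model.transformer.h[29].attn.num_heads)  # -> 2
```

Note that pruning is recorded in the config rather than in the checkpoint itself, so any weights loaded against this config should already have the matching reduced shapes; `from_pretrained` on the full repo reconciles the two by pruning the freshly initialized model before loading the state dict.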