pegasus-big_patent / config.json
system: Update config.json (a127b81)
{
  "activation_dropout": 0.1,
  "activation_function": "relu",
  "add_bias_logits": false,
  "add_final_layer_norm": true,
  "architectures": [
    "PegasusForConditionalGeneration"
  ],
  "attention_dropout": 0.1,
  "bos_token_id": 0,
  "classif_dropout": 0.0,
  "d_model": 1024,
  "decoder_attention_heads": 16,
  "decoder_ffn_dim": 4096,
  "decoder_layerdrop": 0.0,
  "decoder_layers": 16,
  "dropout": 0.1,
  "encoder_attention_heads": 16,
  "encoder_ffn_dim": 4096,
  "encoder_layerdrop": 0.0,
  "encoder_layers": 16,
  "eos_token_id": 1,
  "extra_pos_embeddings": 1,
  "id2label": {
    "0": "LABEL_0",
    "1": "LABEL_1",
    "2": "LABEL_2"
  },
  "init_std": 0.02,
  "is_encoder_decoder": true,
  "label2id": {
    "LABEL_0": 0,
    "LABEL_1": 1,
    "LABEL_2": 2
  },
  "length_penalty": 0.7,
  "max_length": 256,
  "max_position_embeddings": 1024,
  "min_length": 32,
  "model_type": "pegasus",
  "normalize_before": true,
  "normalize_embedding": false,
  "num_beams": 8,
  "num_hidden_layers": 16,
  "pad_token_id": 0,
  "scale_embedding": true,
  "static_position_embeddings": true,
  "vocab_size": 96103
}
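For reference, a minimal sketch of how these settings come into play when the checkpoint is loaded with the transformers library. The Hub id "google/pegasus-big_patent" is an assumption based on this repository's name; the generation defaults (num_beams, length_penalty, min_length, max_length) are read from this config unless overridden per call.

from transformers import PegasusForConditionalGeneration, PegasusTokenizer

# Assumed Hub id for this repository.
model_name = "google/pegasus-big_patent"

tokenizer = PegasusTokenizer.from_pretrained(model_name)
model = PegasusForConditionalGeneration.from_pretrained(model_name)

document = "A patent document to summarize goes here."
# max_position_embeddings is 1024, so truncate inputs to that length.
inputs = tokenizer(document, truncation=True, max_length=1024, return_tensors="pt")

# generate() picks up num_beams=8, length_penalty=0.7, min_length=32
# and max_length=256 from this config unless overridden as arguments.
summary_ids = model.generate(**inputs)
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))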