File size: 1,515 Bytes
{
  "_num_labels": 3,
  "activation_dropout": 0.0,
  "activation_function": "gelu",
  "add_bias_logits": false,
  "add_final_layer_norm": false,
  "architectures": [
    "BartForConditionalGeneration"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 0,
  "classif_dropout": 0.0,
  "classifier_dropout": 0.0,
  "d_model": 1024,
  "decoder_attention_heads": 16,
  "decoder_ffn_dim": 4096,
  "decoder_layerdrop": 0.0,
  "decoder_layers": 12,
  "decoder_start_token_id": 2,
  "dropout": 0.1,
  "early_stopping": true,
  "encoder_attention_heads": 16,
  "encoder_ffn_dim": 4096,
  "encoder_layerdrop": 0.0,
  "encoder_layers": 12,
  "eos_token_id": 2,
  "eos_token_ids": [
    2
  ],
  "forced_eos_token_id": 2,
  "gradient_checkpointing": false,
  "id2label": {
    "0": "LABEL_0",
    "1": "LABEL_1",
    "2": "LABEL_2"
  },
  "init_std": 0.02,
  "is_encoder_decoder": true,
  "label2id": {
    "LABEL_0": 0,
    "LABEL_1": 1,
    "LABEL_2": 2
  },
  "max_length": 62,
  "max_position_embeddings": 1024,
  "min_length": 11,
  "model_type": "bart",
  "no_repeat_ngram_size": 3,
  "normalize_before": false,
  "normalize_embedding": true,
  "num_beams": 6,
  "num_hidden_layers": 12,
  "output_past": true,
  "pad_token_id": 1,
  "prefix": " ",
  "replacing_rate": 0,
  "scale_embedding": false,
  "static_position_embeddings": false,
  "student_decoder_layers": null,
  "student_encoder_layers": null,
  "task_specific_params": {},
  "transformers_version": "4.7.0.dev0",
  "use_cache": true,
  "vocab_size": 50264
}
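
For context, a minimal usage sketch of how a config like this is typically consumed with the Hugging Face transformers library. The local directory "./bart-model" is a placeholder (not part of this repository) and is assumed to contain this config.json together with the model weights and tokenizer files; the printed values simply echo fields from the config above.

# Minimal sketch, assuming `transformers` is installed and "./bart-model"
# holds this config.json plus weights and tokenizer files (placeholder path).
from transformers import BartConfig, BartForConditionalGeneration, BartTokenizer

config = BartConfig.from_pretrained("./bart-model")   # parses the config.json shown above
print(config.d_model, config.encoder_layers, config.num_beams)  # 1024 12 6

model = BartForConditionalGeneration.from_pretrained("./bart-model")
tokenizer = BartTokenizer.from_pretrained("./bart-model")

text = "Some long input document to summarize ..."
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=1024)

# generate() picks up the generation defaults stored in this config
# (num_beams=6, max_length=62, min_length=11, no_repeat_ngram_size=3,
# early_stopping=True); newer transformers versions may route these
# through a generation_config instead and warn about the legacy fields.
summary_ids = model.generate(**inputs)
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))

The beam-search defaults (min_length=11, max_length=62) suggest a short-summary generation setup, but the exact intended task depends on the model weights shipped alongside this config.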