system committed on
Commit
d8b941e
1 Parent(s): 3abd1e2

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +1 -46
config.json CHANGED
@@ -1,46 +1 @@
1
- {
2
- "_num_labels": 3,
3
- "activation_dropout": 0.0,
4
- "activation_function": "swish",
5
- "add_bias_logits": false,
6
- "add_final_layer_norm": false,
7
- "architectures": [
8
- "MarianMTModel"
9
- ],
10
- "attention_dropout": 0.0,
11
- "bos_token_id": 0,
12
- "classif_dropout": 0.0,
13
- "d_model": 512,
14
- "decoder_attention_heads": 8,
15
- "decoder_ffn_dim": 2048,
16
- "decoder_layerdrop": 0.0,
17
- "decoder_layers": 6,
18
- "dropout": 0.1,
19
- "encoder_attention_heads": 8,
20
- "encoder_ffn_dim": 2048,
21
- "encoder_layerdrop": 0.0,
22
- "encoder_layers": 6,
23
- "eos_token_id": 0,
24
- "id2label": {
25
- "0": "LABEL_0",
26
- "1": "LABEL_1",
27
- "2": "LABEL_2"
28
- },
29
- "init_std": 0.02,
30
- "is_encoder_decoder": true,
31
- "label2id": {
32
- "LABEL_0": 0,
33
- "LABEL_1": 1,
34
- "LABEL_2": 2
35
- },
36
- "max_position_embeddings": 512,
37
- "model_type": "bart",
38
- "normalize_before": false,
39
- "normalize_embedding": false,
40
- "num_beams": 6,
41
- "num_hidden_layers": 6,
42
- "pad_token_id": 58100,
43
- "scale_embedding": true,
44
- "static_position_embeddings": true,
45
- "vocab_size": 58101
46
- }
1
+ {"_num_labels": 3, "activation_dropout": 0.0, "activation_function": "swish", "add_bias_logits": false, "add_final_layer_norm": false, "architectures": ["MarianMTModel"], "attention_dropout": 0.0, "bos_token_id": 0, "classif_dropout": 0.0, "d_model": 512, "decoder_attention_heads": 8, "decoder_ffn_dim": 2048, "decoder_layerdrop": 0.0, "decoder_layers": 6, "dropout": 0.1, "encoder_attention_heads": 8, "encoder_ffn_dim": 2048, "encoder_layerdrop": 0.0, "encoder_layers": 6, "eos_token_id": 0, "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2"}, "init_std": 0.02, "is_encoder_decoder": true, "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2}, "max_position_embeddings": 512, "model_type": "marian", "normalize_before": false, "normalize_embedding": false, "num_beams": 6, "num_hidden_layers": 6, "pad_token_id": 58100, "scale_embedding": true, "static_position_embeddings": true, "vocab_size": 58101}