nielsr HF staff committed on
Commit
80a7130
1 Parent(s): 5ffcad0
Files changed (2) hide show
  1. config.json +7 -7
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -7,17 +7,17 @@
7
  "attention_dropout": 0.0,
8
  "bos_token_id": 0,
9
  "classifier_dropout": 0.0,
10
- "d_model": 768,
11
  "decoder_attention_heads": 16,
12
- "decoder_ffn_dim": 3072,
13
  "decoder_layerdrop": 0.0,
14
- "decoder_layers": 6,
15
  "decoder_start_token_id": 2,
16
  "dropout": 0.1,
17
  "encoder_attention_heads": 16,
18
- "encoder_ffn_dim": 3072,
19
  "encoder_layerdrop": 0.0,
20
- "encoder_layers": 6,
21
  "eos_token_id": 2,
22
  "forced_eos_token_id": 2,
23
  "id2label": {
@@ -34,11 +34,11 @@
34
  },
35
  "max_position_embeddings": 1024,
36
  "model_type": "bart",
37
- "num_hidden_layers": 6,
38
  "pad_token_id": 1,
39
  "scale_embedding": false,
40
  "torch_dtype": "float32",
41
  "transformers_version": "4.15.0",
42
  "use_cache": true,
43
- "vocab_size": 51201
44
  }
 
7
  "attention_dropout": 0.0,
8
  "bos_token_id": 0,
9
  "classifier_dropout": 0.0,
10
+ "d_model": 1024,
11
  "decoder_attention_heads": 16,
12
+ "decoder_ffn_dim": 4096,
13
  "decoder_layerdrop": 0.0,
14
+ "decoder_layers": 12,
15
  "decoder_start_token_id": 2,
16
  "dropout": 0.1,
17
  "encoder_attention_heads": 16,
18
+ "encoder_ffn_dim": 4096,
19
  "encoder_layerdrop": 0.0,
20
+ "encoder_layers": 12,
21
  "eos_token_id": 2,
22
  "forced_eos_token_id": 2,
23
  "id2label": {
 
34
  },
35
  "max_position_embeddings": 1024,
36
  "model_type": "bart",
37
+ "num_hidden_layers": 12,
38
  "pad_token_id": 1,
39
  "scale_embedding": false,
40
  "torch_dtype": "float32",
41
  "transformers_version": "4.15.0",
42
  "use_cache": true,
43
+ "vocab_size": 50264
44
  }
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:dee70cd453f183c1354b97650698814b89c85f00c3f87604d55ab2ea4ebe20e0
3
- size 560854649
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6a1e0914d8db6278122435c7c529c7b16931d2d00f427e3b691fc264da903f89
3
+ size 1625546049