flozi00 committed
Commit 173cda2 (parent: c2c828a)

Update config.json

Files changed (1): config.json (+7, -6)
config.json CHANGED
@@ -9,22 +9,22 @@
   "attention_dropout": 0.0,
   "begin_suppress_tokens": [
     220,
-    50256
+    50257
   ],
-  "bos_token_id": 50256,
+  "bos_token_id": 50257,
   "classifier_proj_size": 256,
   "d_model": 1280,
   "decoder_attention_heads": 20,
   "decoder_ffn_dim": 5120,
   "decoder_layerdrop": 0.0,
   "decoder_layers": 32,
-  "decoder_start_token_id": 50257,
+  "decoder_start_token_id": 50258,
   "dropout": 0.0,
   "encoder_attention_heads": 20,
   "encoder_ffn_dim": 5120,
   "encoder_layerdrop": 0.0,
   "encoder_layers": 32,
-  "eos_token_id": 50256,
+  "eos_token_id": 50257,
   "init_std": 0.02,
   "is_encoder_decoder": true,
   "mask_feature_length": 10,
@@ -33,6 +33,7 @@
   "mask_time_length": 10,
   "mask_time_min_masks": 2,
   "mask_time_prob": 0.05,
+  "max_length": 448,
   "max_source_positions": 1500,
   "max_target_positions": 448,
   "median_filter_width": 7,
@@ -42,8 +43,8 @@
   "pad_token_id": 50256,
   "scale_embedding": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.35.0",
+  "transformers_version": "4.36.0",
   "use_cache": true,
   "use_weighted_layer_sum": false,
   "vocab_size": 51866
-}
+}
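
This update shifts the special-token IDs of the Whisper-style encoder-decoder config up by one (bos_token_id and eos_token_id from 50256 to 50257, decoder_start_token_id from 50257 to 50258, and the second begin_suppress_tokens entry to match), adds an explicit "max_length": 448, and bumps transformers_version to 4.36.0. As a quick sanity check, here is a minimal sketch, assuming the updated config.json has been downloaded next to the script (the file path is hypothetical), that verifies the new values using only the Python standard library:

import json

# Load the updated config (path is hypothetical; point it at the
# downloaded copy of the repository's config.json).
with open("config.json") as f:
    config = json.load(f)

# The commit shifts the special-token IDs up by one and adds max_length.
assert config["bos_token_id"] == 50257
assert config["eos_token_id"] == 50257
assert config["decoder_start_token_id"] == 50258
assert config["begin_suppress_tokens"] == [220, 50257]
assert config["max_length"] == 448

# All special-token IDs must stay inside the vocabulary.
assert all(
    tid < config["vocab_size"]
    for tid in (
        config["bos_token_id"],
        config["eos_token_id"],
        config["decoder_start_token_id"],
        config["pad_token_id"],
    )
)
print("config.json token IDs look consistent")

Note that pad_token_id stays at 50256, so after this change it no longer coincides with eos_token_id.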