shahules786 committed
Commit 3a6b70b
Parent: b4f2ae2

Upload config.json with huggingface_hub

Files changed (1)
  1. config.json +7 -5
config.json CHANGED
@@ -1,21 +1,21 @@
 {
   "_name_or_path": "t5-base",
   "architectures": [
-    "T5ForConditionalGeneration"
+    "MT5ForConditionalGeneration"
   ],
   "d_ff": 3072,
   "d_kv": 64,
   "d_model": 768,
   "decoder_start_token_id": 0,
-  "dense_act_fn": "relu",
+  "dense_act_fn": "gelu_new",
   "dropout_rate": 0.1,
   "eos_token_id": 1,
-  "feed_forward_proj": "relu",
+  "feed_forward_proj": "gated-gelu",
   "initializer_factor": 1.0,
   "is_encoder_decoder": true,
-  "is_gated_act": false,
+  "is_gated_act": true,
   "layer_norm_epsilon": 1e-06,
-  "model_type": "t5",
+  "model_type": "mt5",
   "n_positions": 512,
   "num_decoder_layers": 12,
   "num_heads": 12,
@@ -53,6 +53,8 @@
       "prefix": "translate English to Romanian: "
     }
   },
+  "tie_word_embeddings": false,
+  "tokenizer_class": "T5Tokenizer",
   "torch_dtype": "float32",
   "transformers_version": "4.26.1",
   "use_cache": true,
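The diff switches the config from the plain T5 stack (relu feed-forward) to the mT5-style gated-GeLU variant: model_type becomes "mt5", the feed-forward projection becomes "gated-gelu" with dense_act_fn "gelu_new" and is_gated_act true, and the input/output word embeddings are untied. A minimal sketch of how the updated config could be loaded and instantiated with transformers, assuming the file has been downloaded locally as config.json (the repository ID is not shown in this commit view):

from transformers import MT5Config, MT5ForConditionalGeneration

# Load the updated config from a local file; the path is an assumption
# for illustration, not part of the commit.
config = MT5Config.from_json_file("config.json")

# With model_type "mt5", the MT5 model class applies; it uses the
# gated-GeLU feed-forward (dense_act_fn="gelu_new", is_gated_act=True)
# and untied embeddings (tie_word_embeddings=False). This creates a
# randomly initialized model from the config alone.
model = MT5ForConditionalGeneration(config)
print(model.config.model_type)         # "mt5"
print(model.config.feed_forward_proj)  # "gated-gelu"

Loading from the actual Hub repository with AutoConfig.from_pretrained / AutoModelForSeq2SeqLM.from_pretrained would resolve the same updated config automatically.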