anzorq committed on
Commit f8078a8
1 Parent(s): 946dc01
Files changed (2)
  1. config.json +6 -3
  2. pytorch_model.bin +3 -0
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "kbd_lat-835k_ru-3M_t5-base/model/90000",
+  "_name_or_path": "./",
   "architectures": [
     "T5ForConditionalGeneration"
   ],
@@ -7,11 +7,13 @@
   "d_kv": 64,
   "d_model": 768,
   "decoder_start_token_id": 0,
+  "dense_act_fn": "gelu_new",
   "dropout_rate": 0.1,
   "eos_token_id": 1,
   "feed_forward_proj": "gated-gelu",
   "initializer_factor": 1.0,
   "is_encoder_decoder": true,
+  "is_gated_act": true,
   "layer_norm_epsilon": 1e-06,
   "model_type": "t5",
   "num_decoder_layers": 12,
@@ -19,10 +21,11 @@
   "num_layers": 12,
   "output_past": true,
   "pad_token_id": 0,
+  "relative_attention_max_distance": 128,
   "relative_attention_num_buckets": 32,
   "tie_word_embeddings": false,
   "torch_dtype": "float32",
-  "transformers_version": "4.15.0",
+  "transformers_version": "4.21.0",
   "use_cache": true,
-  "vocab_size": 152
+  "vocab_size": 4000
 }
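The added keys match what newer transformers releases derive from feed_forward_proj, which lines up with the transformers_version bump to 4.21.0. A minimal sketch (not part of the commit; fields elided from the diff, e.g. d_ff and num_heads, are left at T5Config defaults here) rebuilding the updated config in code and checking the derived activation fields:

from transformers import T5Config

# Values taken from the new config.json shown in the diff above.
config = T5Config(
    vocab_size=4000,
    d_model=768,
    d_kv=64,
    num_layers=12,
    num_decoder_layers=12,
    feed_forward_proj="gated-gelu",
    relative_attention_num_buckets=32,
    relative_attention_max_distance=128,
    dropout_rate=0.1,
    layer_norm_epsilon=1e-06,
    initializer_factor=1.0,
    decoder_start_token_id=0,
    eos_token_id=1,
    pad_token_id=0,
    tie_word_embeddings=False,
)

# transformers >= 4.21 derives these from feed_forward_proj, which is why
# they now show up as explicit keys when the config is re-saved:
print(config.dense_act_fn)   # gelu_new
print(config.is_gated_act)   # True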
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:70e95433b9eb04e6bd905ebdf8566be4d9c4d4dc79305adb17ad6867a01425cb
+size 817584205
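The three lines above are the Git LFS pointer file, not the weights themselves: they record the SHA-256 digest and byte size of the actual pytorch_model.bin. A minimal sketch (assuming the full file has already been fetched, e.g. with git lfs pull) to verify a local copy against the pointer:

import hashlib
import os

EXPECTED_OID = "70e95433b9eb04e6bd905ebdf8566be4d9c4d4dc79305adb17ad6867a01425cb"
EXPECTED_SIZE = 817584205  # bytes, from the pointer file

def verify(path):
    # Size check is cheap, so do it first.
    if os.path.getsize(path) != EXPECTED_SIZE:
        return False
    # Hash in 1 MiB chunks to keep memory use flat.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == EXPECTED_OID

print(verify("pytorch_model.bin"))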