hhou435 committed on
Commit 08b8772
1 Parent(s): 1dc8472
Files changed (4)
  1. config.json +8 -3
  2. pytorch_model.bin +2 -2
  3. tf_model.h5 +2 -2
  4. vocab.txt +1 -100
config.json CHANGED
@@ -1,4 +1,5 @@
 {
+  "_name_or_path": "t5",
   "architectures": [
     "T5ForConditionalGeneration"
   ],
@@ -7,15 +8,19 @@
   "d_model": 768,
   "decoder_start_token_id": 101,
   "dropout_rate": 0.1,
+  "eos_token_id": 1,
+  "feed_forward_proj": "relu",
   "initializer_factor": 1.0,
   "is_encoder_decoder": true,
   "layer_norm_epsilon": 1e-06,
   "model_type": "t5",
   "n_positions": 512,
+  "num_decoder_layers": 12,
   "num_heads": 12,
   "num_layers": 12,
   "pad_token_id": 0,
   "relative_attention_num_buckets": 32,
-  "tokenizer_class": "BertTokenizer",
-  "vocab_size": 21328
-}
+  "transformers_version": "4.3.3",
+  "use_cache": true,
+  "vocab_size": 21228
+}
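
A minimal sketch of checking the updated config, assuming a local clone of this repo at "path/to/this-repo" (a placeholder path, not the actual repo id). config.json now pins transformers_version 4.3.3:

# Minimal sketch; "path/to/this-repo" is a placeholder for a local clone.
from transformers import T5Config

config = T5Config.from_pretrained("path/to/this-repo")
assert config.vocab_size == 21228           # trimmed from 21328 in this commit
assert config.feed_forward_proj == "relu"   # key added by this commit
assert config.decoder_start_token_id == 101 # unchanged BERT-style [CLS] id
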
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:51a392c459f2a44adafc7142676a9ae5127cd1a2de25b6a38017c1e000aa3f95
-size 858555703
+oid sha256:2cab3b7ebb0d6b80d710b38e84209c8bf9d9cfd04cba298bba2ed27aeea02579
+size 858248503
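
Both weight files are stored as Git LFS pointers, so only the oid (a SHA-256 of the blob) and size change. A minimal sketch for verifying a downloaded pytorch_model.bin against the new pointer, using only the oid and size shown above:

import hashlib
import os

def sha256_of(path: str) -> str:
    # Stream in 1 MiB chunks so the ~858 MB checkpoint never sits fully in memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

# oid and size taken from the updated LFS pointer above
assert sha256_of("pytorch_model.bin") == "2cab3b7ebb0d6b80d710b38e84209c8bf9d9cfd04cba298bba2ed27aeea02579"
assert os.path.getsize("pytorch_model.bin") == 858248503
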
tf_model.h5 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:edb63e03b5674a55e4b3bf2f282478460b87f29a83cb107f3fd8151f6d5c65c5
-size 858966560
+oid sha256:e51002a63ffa2b63630e8d94adb7a76a4bb7b79dc142abb17d13993d49821bcb
+size 858659360
vocab.txt CHANGED
@@ -21226,103 +21226,4 @@ extra96
 extra97
 extra98
 extra99
-extra100
-extra101
-extra102
-extra103
-extra104
-extra105
-extra106
-extra107
-extra108
-extra109
-extra110
-extra111
-extra112
-extra113
-extra114
-extra115
-extra116
-extra117
-extra118
-extra119
-extra120
-extra121
-extra122
-extra123
-extra124
-extra125
-extra126
-extra127
-extra128
-extra129
-extra130
-extra131
-extra132
-extra133
-extra134
-extra135
-extra136
-extra137
-extra138
-extra139
-extra140
-extra141
-extra142
-extra143
-extra144
-extra145
-extra146
-extra147
-extra148
-extra149
-extra150
-extra151
-extra152
-extra153
-extra154
-extra155
-extra156
-extra157
-extra158
-extra159
-extra160
-extra161
-extra162
-extra163
-extra164
-extra165
-extra166
-extra167
-extra168
-extra169
-extra170
-extra171
-extra172
-extra173
-extra174
-extra175
-extra176
-extra177
-extra178
-extra179
-extra180
-extra181
-extra182
-extra183
-extra184
-extra185
-extra186
-extra187
-extra188
-extra189
-extra190
-extra191
-extra192
-extra193
-extra194
-extra195
-extra196
-extra197
-extra198
-extra199
+
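
The 100 removed placeholder tokens (extra100 through extra199) account exactly for the config change: 21328 - 100 = 21228. A minimal sketch for checking that the trimmed vocab.txt still lines up with config.json (paths are placeholders for a local clone):

from transformers import T5Config

# Filtering out empty strings drops the trailing blank line added in this commit.
with open("vocab.txt", encoding="utf-8") as f:
    tokens = [t for t in f.read().split("\n") if t]

config = T5Config.from_pretrained(".")
# 21328 tokens - 100 removed "extraNNN" placeholders = 21228
assert len(tokens) == config.vocab_size == 21228
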