Maltehb committed on
Commit
a68ff20
1 Parent(s): e8e83dc

smaller files

Files changed (4)
  1. config.json +19 -0
  2. model.ckpt.index +0 -0
  3. model.ckpt.meta +0 -0
  4. vocab.txt +0 -0
config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "intermediate_size": 3072,
+ "pooler_type": "first_token_transform",
+ "initializer_range": 0.02,
+ "hidden_dropout_prob": 0.1,
+ "num_hidden_layers": 12,
+ "vocab_size": 32000,
+ "pooler_size_per_head": 128,
+ "attention_probs_dropout_prob": 0.1,
+ "pooler_num_attention_heads": 12,
+ "type_vocab_size": 2,
+ "num_attention_heads": 12,
+ "max_position_embeddings": 512,
+ "hidden_size": 768,
+ "directionality": "bidi",
+ "pooler_num_fc_layers": 3,
+ "hidden_act": "gelu",
+ "pooler_fc_size": 768
+ }
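The added config.json is an original Google-style BERT configuration describing a standard BERT-Base encoder (12 layers, 12 attention heads, hidden size 768) with a 32,000-token vocabulary. As a minimal sketch (assuming the Hugging Face `transformers` library is installed and the file sits in the working directory as in the listing above), the config can be inspected like this:

```python
# Minimal sketch (assumption): read the uploaded config.json with the
# Hugging Face `transformers` library. TF-only fields such as
# "pooler_type" should simply end up as extra attributes on the config.
from transformers import BertConfig

config = BertConfig.from_json_file("config.json")

# Sanity-check the BERT-Base shape described by the JSON above.
assert config.num_hidden_layers == 12
assert config.hidden_size == 768
assert config.num_attention_heads == 12
print(config.vocab_size)  # 32000
```

Note that model.ckpt.index and model.ckpt.meta are parts of a TensorFlow checkpoint; loading the weights (for example via transformers' `from_pretrained(..., from_tf=True)` conversion path) would additionally require the checkpoint's .data shards, which are not part of this diff.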
model.ckpt.index ADDED
Binary file (9.38 kB).
model.ckpt.meta ADDED
Binary file (4.63 MB).
vocab.txt ADDED
The diff for this file is too large to render.