hjb committed on
Commit 5a75d81
Parent: 5fefffe
Files changed (2)
  1. config.json +9 -5
  2. tf_model.h5 +3 -0
config.json CHANGED
@@ -1,4 +1,5 @@
 {
+  "_name_or_path": ".",
   "architectures": [
     "BertForTokenClassification"
   ],
@@ -23,15 +24,15 @@
   "initializer_range": 0.02,
   "intermediate_size": 3072,
   "label2id": {
-    "O": 0,
-    "B-PER": 1,
-    "I-PER": 2,
     "B-LOC": 3,
-    "I-LOC": 4,
     "B-ORG": 5,
+    "B-PER": 1,
+    "I-LOC": 4,
     "I-ORG": 6,
-    "[PAD]": 7,
+    "I-PER": 2,
+    "O": 0,
     "[CLS]": 8,
+    "[PAD]": 7,
     "[SEP]": 9
   },
   "layer_norm_eps": 1e-12,
@@ -45,6 +46,9 @@
   "pooler_num_fc_layers": 3,
   "pooler_size_per_head": 128,
   "pooler_type": "first_token_transform",
+  "position_embedding_type": "absolute",
+  "transformers_version": "4.6.1",
   "type_vocab_size": 2,
+  "use_cache": true,
   "vocab_size": 32000
 }
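Aside from the "_name_or_path", "position_embedding_type", "transformers_version", and "use_cache" fields written out by transformers 4.6.1, the change only re-sorts the label2id keys alphabetically; the integer ids themselves are unchanged. A minimal sketch of reading the mapping back through transformers (the path "." is a placeholder for a local checkout of this repository):

    # Minimal sketch: parse the updated config.json and look up a label id.
    # The path "." is a placeholder for a local checkout of this repository.
    from transformers import BertConfig

    config = BertConfig.from_pretrained(".")   # reads config.json from the directory
    print(config.label2id["B-PER"])            # -> 1, the same id as before this commit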
tf_model.h5 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:16c5dece23f9023cea9b1eacc35cadc4fa2b9bfba74b1d72262dd3df792e94cf
+size 440411184
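The three added lines are a Git LFS pointer, not the weights themselves: the real tf_model.h5 (about 440 MB) is stored in LFS and identified by the sha256 oid and size above. A minimal sketch, assuming the actual file has already been fetched (e.g. with git lfs pull), that checks a local copy against the pointer:

    # Minimal sketch: verify a downloaded tf_model.h5 against the LFS pointer's
    # oid (sha256) and size. Assumes the real file is present in the working tree.
    import hashlib
    import os

    EXPECTED_SHA256 = "16c5dece23f9023cea9b1eacc35cadc4fa2b9bfba74b1d72262dd3df792e94cf"
    EXPECTED_SIZE = 440411184  # bytes

    path = "tf_model.h5"
    assert os.path.getsize(path) == EXPECTED_SIZE, "size does not match LFS pointer"

    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            digest.update(chunk)
    assert digest.hexdigest() == EXPECTED_SHA256, "sha256 does not match LFS pointer"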