hjb committed on
Commit
f99d1fd
1 Parent(s): 21aea0a

added tf model

Browse files
Files changed (2) hide show
  1. config.json +7 -4
  2. tf_model.h5 +3 -0
config.json CHANGED
@@ -1,4 +1,5 @@
1
  {
 
2
  "architectures": [
3
  "ElectraForTokenClassification"
4
  ],
@@ -23,15 +24,15 @@
23
  "initializer_range": 0.02,
24
  "intermediate_size": 1024,
25
  "label2id": {
26
- "B-PER": 0,
27
- "I-PER": 1,
28
  "B-LOC": 2,
29
- "I-LOC": 3,
30
  "B-ORG": 4,
 
 
31
  "I-ORG": 5,
 
32
  "O": 6,
33
- "[PAD]": 7,
34
  "[CLS]": 8,
 
35
  "[SEP]": 9
36
  },
37
  "layer_norm_eps": 1e-12,
@@ -40,10 +41,12 @@
40
  "num_attention_heads": 4,
41
  "num_hidden_layers": 12,
42
  "pad_token_id": 0,
 
43
  "summary_activation": "gelu",
44
  "summary_last_dropout": 0.1,
45
  "summary_type": "first",
46
  "summary_use_proj": true,
 
47
  "type_vocab_size": 2,
48
  "vocab_size": 32000
49
  }
1
  {
2
+ "_name_or_path": ".",
3
  "architectures": [
4
  "ElectraForTokenClassification"
5
  ],
24
  "initializer_range": 0.02,
25
  "intermediate_size": 1024,
26
  "label2id": {
 
 
27
  "B-LOC": 2,
 
28
  "B-ORG": 4,
29
+ "B-PER": 0,
30
+ "I-LOC": 3,
31
  "I-ORG": 5,
32
+ "I-PER": 1,
33
  "O": 6,
 
34
  "[CLS]": 8,
35
+ "[PAD]": 7,
36
  "[SEP]": 9
37
  },
38
  "layer_norm_eps": 1e-12,
41
  "num_attention_heads": 4,
42
  "num_hidden_layers": 12,
43
  "pad_token_id": 0,
44
+ "position_embedding_type": "absolute",
45
  "summary_activation": "gelu",
46
  "summary_last_dropout": 0.1,
47
  "summary_type": "first",
48
  "summary_use_proj": true,
49
+ "transformers_version": "4.6.1",
50
  "type_vocab_size": 2,
51
  "vocab_size": 32000
52
  }
tf_model.h5 ADDED
@@ -0,0 +1,3 @@
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5bfd13fdfef7d6c872786193aa5608e8c42d6d2370fd0995363123dd30a13efd
3
+ size 54986216