adrianeboyd committed
Commit a8035b6
1 Parent(s): 383907d

Update spaCy pipeline
README.md CHANGED
@@ -14,51 +14,51 @@ model-index:
   metrics:
   - name: NER Precision
     type: precision
-    value: 0.8998444478
+    value: 0.8994886705
   - name: NER Recall
     type: recall
-    value: 0.8980869391
+    value: 0.8985877404
   - name: NER F Score
     type: f_score
-    value: 0.8989648344
+    value: 0.8990379798
   - task:
       name: TAG
       type: token-classification
     metrics:
     - name: TAG (XPOS) Accuracy
       type: accuracy
-      value: 0.9780283894
+      value: 0.9815755741
   - task:
       name: UNLABELED_DEPENDENCIES
       type: token-classification
     metrics:
     - name: Unlabeled Attachment Score (UAS)
       type: f_score
-      value: 0.9514229828
+      value: 0.9505463996
   - task:
       name: LABELED_DEPENDENCIES
       type: token-classification
     metrics:
     - name: Labeled Attachment Score (LAS)
       type: f_score
-      value: 0.9379149652
+      value: 0.9368643771
   - task:
       name: SENTS
       type: token-classification
     metrics:
     - name: Sentences F-Score
       type: f_score
-      value: 0.897621366
+      value: 0.8881133524
 ---
 ### Details: https://spacy.io/models/en#en_core_web_trf
 
-English transformer pipeline (roberta-base). Components: transformer, tagger, parser, ner, attribute_ruler, lemmatizer.
+English transformer pipeline (Transformer(name='roberta-base', piece_encoder='byte-bpe', stride=104, type='roberta', width=768, window=144, vocab_size=50265)). Components: transformer, tagger, parser, ner, attribute_ruler, lemmatizer.
 
 | Feature | Description |
 | --- | --- |
 | **Name** | `en_core_web_trf` |
-| **Version** | `3.6.1` |
-| **spaCy** | `>=3.6.0,<3.7.0` |
+| **Version** | `3.7.2` |
+| **spaCy** | `>=3.7.0,<3.8.0` |
 | **Default Pipeline** | `transformer`, `tagger`, `parser`, `attribute_ruler`, `lemmatizer`, `ner` |
 | **Components** | `transformer`, `tagger`, `parser`, `attribute_ruler`, `lemmatizer`, `ner` |
 | **Vectors** | 0 keys, 0 unique vectors (0 dimensions) |
@@ -88,12 +88,12 @@ English transformer pipeline (roberta-base). Components: transformer, tagger, pa
 | `TOKEN_P` | 99.57 |
 | `TOKEN_R` | 99.58 |
 | `TOKEN_F` | 99.57 |
-| `TAG_ACC` | 97.80 |
-| `SENTS_P` | 94.92 |
-| `SENTS_R` | 85.14 |
-| `SENTS_F` | 89.76 |
-| `DEP_UAS` | 95.14 |
-| `DEP_LAS` | 93.79 |
-| `ENTS_P` | 89.98 |
-| `ENTS_R` | 89.81 |
+| `TAG_ACC` | 98.16 |
+| `SENTS_P` | 94.29 |
+| `SENTS_R` | 83.94 |
+| `SENTS_F` | 88.81 |
+| `DEP_UAS` | 95.05 |
+| `DEP_LAS` | 93.69 |
+| `ENTS_P` | 89.95 |
+| `ENTS_R` | 89.86 |
 | `ENTS_F` | 89.90 |
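The scores above come from the packaged pipeline itself. As a minimal usage sketch (assuming the updated wheel from this commit is installed together with spaCy >=3.7; the example sentence is illustrative only):

```python
# Minimal usage sketch for the en_core_web_trf pipeline described above.
# Assumes spaCy >=3.7 and this package (which declares spacy-curated-transformers
# as a requirement, see meta.json below) are installed.
import spacy

nlp = spacy.load("en_core_web_trf")
doc = nlp("Apple is looking at buying U.K. startup for $1 billion.")

# Tagger / parser annotations (TAG_ACC, DEP_UAS, DEP_LAS in the table above)
for token in doc:
    print(token.text, token.tag_, token.dep_, token.head.text)

# Named entities (ENTS_P / ENTS_R / ENTS_F in the table above)
for ent in doc.ents:
    print(ent.text, ent.label_)
```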
accuracy.json CHANGED
@@ -3,328 +3,328 @@
3
  "token_p": 0.9956819193,
4
  "token_r": 0.9957659295,
5
  "token_f": 0.9957239226,
6
- "tag_acc": 0.9780283894,
7
- "sents_p": 0.9491799662,
8
- "sents_r": 0.8513754205,
9
- "sents_f": 0.897621366,
10
- "dep_uas": 0.9514229828,
11
- "dep_las": 0.9379149652,
12
  "dep_las_per_type": {
13
  "prep": {
14
- "p": 0.9221881274,
15
- "r": 0.92365775,
16
- "f": 0.9229223537
17
  },
18
  "det": {
19
- "p": 0.9891605542,
20
- "r": 0.9897251896,
21
- "f": 0.9894427913
22
  },
23
  "pobj": {
24
- "p": 0.982956549,
25
- "r": 0.9851953662,
26
- "f": 0.9840746842
27
  },
28
  "nsubj": {
29
- "p": 0.9779701023,
30
- "r": 0.9802409639,
31
- "f": 0.9791042164
32
  },
33
  "aux": {
34
- "p": 0.9882541377,
35
- "r": 0.9886940265,
36
- "f": 0.9884740332
37
  },
38
  "advmod": {
39
- "p": 0.8966186019,
40
- "r": 0.894581861,
41
- "f": 0.8955990735
42
  },
43
  "relcl": {
44
- "p": 0.8778409091,
45
- "r": 0.8969521045,
46
- "f": 0.8872936109
47
  },
48
  "root": {
49
- "p": 0.9657277341,
50
- "r": 0.8662180883,
51
- "f": 0.913270274
52
  },
53
  "xcomp": {
54
- "p": 0.9401586157,
55
- "r": 0.936109117,
56
- "f": 0.9381294964
57
  },
58
  "amod": {
59
- "p": 0.9486826583,
60
- "r": 0.9378036929,
61
- "f": 0.9432118073
62
  },
63
  "compound": {
64
- "p": 0.9488718462,
65
- "r": 0.9508799287,
66
- "f": 0.9498748261
67
  },
68
  "poss": {
69
- "p": 0.9859550562,
70
- "r": 0.9891304348,
71
- "f": 0.9875401929
72
  },
73
  "ccomp": {
74
- "p": 0.8469407085,
75
- "r": 0.9105906314,
76
- "f": 0.8776131122
77
  },
78
  "attr": {
79
- "p": 0.9512396694,
80
- "r": 0.9680403701,
81
- "f": 0.959566486
82
  },
83
  "case": {
84
- "p": 0.988065639,
85
- "r": 0.9944944945,
86
- "f": 0.9912696433
87
  },
88
  "mark": {
89
- "p": 0.9465128474,
90
- "r": 0.9565447801,
91
- "f": 0.9515023722
92
  },
93
  "intj": {
94
- "p": 0.6107055961,
95
- "r": 0.7355311355,
96
- "f": 0.6673313393
97
  },
98
  "advcl": {
99
- "p": 0.8050394502,
100
- "r": 0.7965248048,
101
- "f": 0.8007594937
102
  },
103
  "cc": {
104
- "p": 0.8882429707,
105
- "r": 0.8954670494,
106
- "f": 0.8918403812
107
  },
108
  "neg": {
109
- "p": 0.962406015,
110
- "r": 0.9633718013,
111
- "f": 0.962888666
112
  },
113
  "conj": {
114
- "p": 0.8574309097,
115
- "r": 0.9099949648,
116
- "f": 0.8829312977
117
  },
118
  "nsubjpass": {
119
- "p": 0.9603547209,
120
- "r": 0.9441025641,
121
- "f": 0.9521592966
122
  },
123
  "auxpass": {
124
- "p": 0.9705611776,
125
- "r": 0.9612756264,
126
- "f": 0.9658960861
127
  },
128
  "dobj": {
129
- "p": 0.9741234726,
130
- "r": 0.9719499562,
131
- "f": 0.9730355006
132
  },
133
  "nummod": {
134
- "p": 0.9544419134,
135
- "r": 0.9522727273,
136
- "f": 0.9533560865
137
  },
138
  "npadvmod": {
139
- "p": 0.8556701031,
140
- "r": 0.8255772647,
141
- "f": 0.8403543663
142
  },
143
  "prt": {
144
- "p": 0.8996415771,
145
- "r": 0.8996415771,
146
- "f": 0.8996415771
147
  },
148
  "pcomp": {
149
- "p": 0.9461979914,
150
- "r": 0.9236694678,
151
- "f": 0.9347980156
152
  },
153
  "expl": {
154
- "p": 0.9893617021,
155
  "r": 0.9957173448,
156
- "f": 0.992529349
157
  },
158
  "acl": {
159
- "p": 0.8503893215,
160
- "r": 0.8341516639,
161
- "f": 0.8421922335
162
  },
163
  "agent": {
164
- "p": 0.9573712256,
165
- "r": 0.9659498208,
166
- "f": 0.9616413916
167
  },
168
  "dative": {
169
- "p": 0.753875969,
170
- "r": 0.8922018349,
171
- "f": 0.8172268908
172
  },
173
  "acomp": {
174
- "p": 0.9285395266,
175
- "r": 0.9428571429,
176
- "f": 0.9356435644
177
  },
178
  "dep": {
179
- "p": 0.4055829228,
180
- "r": 0.400974026,
181
- "f": 0.4032653061
182
  },
183
  "csubj": {
184
- "p": 0.8742857143,
185
  "r": 0.9053254438,
186
- "f": 0.8895348837
187
  },
188
  "quantmod": {
189
- "p": 0.8656838657,
190
- "r": 0.8586515028,
191
- "f": 0.8621533442
192
  },
193
  "nmod": {
194
- "p": 0.8400566171,
195
- "r": 0.7233394272,
196
- "f": 0.7773411919
197
  },
198
  "appos": {
199
- "p": 0.8196095076,
200
- "r": 0.8377440347,
201
- "f": 0.8285775585
202
  },
203
  "predet": {
204
- "p": 0.8675213675,
205
- "r": 0.8712446352,
206
- "f": 0.869379015
207
  },
208
  "preconj": {
209
- "p": 0.66,
210
- "r": 0.7674418605,
211
- "f": 0.7096774194
212
  },
213
  "oprd": {
214
- "p": 0.8832807571,
215
- "r": 0.8358208955,
216
- "f": 0.8588957055
217
  },
218
  "parataxis": {
219
- "p": 0.605313093,
220
- "r": 0.6919739696,
221
- "f": 0.6457489879
222
  },
223
  "meta": {
224
- "p": 0.2361111111,
225
  "r": 0.6538461538,
226
- "f": 0.3469387755
227
  },
228
  "csubjpass": {
229
- "p": 0.8333333333,
230
  "r": 0.8333333333,
231
- "f": 0.8333333333
232
  }
233
  },
234
- "ents_p": 0.8998444478,
235
- "ents_r": 0.8980869391,
236
- "ents_f": 0.8989648344,
237
  "ents_per_type": {
238
  "DATE": {
239
- "p": 0.8931977114,
240
- "r": 0.8920634921,
241
- "f": 0.8926302414
242
  },
243
  "GPE": {
244
- "p": 0.9540487532,
245
- "r": 0.949790795,
246
- "f": 0.9519150126
247
  },
248
  "ORDINAL": {
249
- "p": 0.8063492063,
250
- "r": 0.7888198758,
251
- "f": 0.7974882261
252
  },
253
  "ORG": {
254
- "p": 0.901656868,
255
- "r": 0.894485684,
256
- "f": 0.8980569603
257
  },
258
  "FAC": {
259
- "p": 0.5964912281,
260
- "r": 0.7846153846,
261
- "f": 0.6777408638
262
  },
263
  "QUANTITY": {
264
- "p": 0.773255814,
265
- "r": 0.7307692308,
266
- "f": 0.7514124294
267
  },
268
  "LOC": {
269
- "p": 0.8446601942,
270
- "r": 0.8312101911,
271
- "f": 0.8378812199
272
  },
273
  "CARDINAL": {
274
- "p": 0.857225769,
275
- "r": 0.8781212842,
276
- "f": 0.8675477239
277
  },
278
  "PERSON": {
279
- "p": 0.9384712972,
280
- "r": 0.9657310705,
281
- "f": 0.951906064
282
  },
283
  "NORP": {
284
- "p": 0.9270998415,
285
- "r": 0.936,
286
- "f": 0.9315286624
287
  },
288
  "LAW": {
289
- "p": 0.609375,
290
  "r": 0.609375,
291
- "f": 0.609375
292
- },
293
- "MONEY": {
294
- "p": 0.9325153374,
295
- "r": 0.8972845336,
296
- "f": 0.9145607702
297
  },
298
  "TIME": {
299
- "p": 0.756302521,
300
- "r": 0.7894736842,
301
- "f": 0.7725321888
302
  },
303
  "EVENT": {
304
- "p": 0.7537313433,
305
- "r": 0.5804597701,
306
- "f": 0.6558441558
307
  },
308
  "WORK_OF_ART": {
309
- "p": 0.5767195767,
310
- "r": 0.5618556701,
311
- "f": 0.5691906005
312
  },
313
- "PERCENT": {
314
- "p": 0.9174603175,
315
- "r": 0.8851454824,
316
- "f": 0.9010132502
317
  },
318
- "PRODUCT": {
319
- "p": 0.7012987013,
320
- "r": 0.5118483412,
321
- "f": 0.5917808219
322
  },
323
  "LANGUAGE": {
324
- "p": 0.9583333333,
325
- "r": 0.71875,
326
- "f": 0.8214285714
327
  }
328
  },
329
- "speed": 4706.6408562715
330
  }
3
  "token_p": 0.9956819193,
4
  "token_r": 0.9957659295,
5
  "token_f": 0.9957239226,
6
+ "tag_acc": 0.9815755741,
7
+ "sents_p": 0.9428677288,
8
+ "sents_r": 0.8393693515,
9
+ "sents_f": 0.8881133524,
10
+ "dep_uas": 0.9505463996,
11
+ "dep_las": 0.9368643771,
12
  "dep_las_per_type": {
13
  "prep": {
14
+ "p": 0.9213844814,
15
+ "r": 0.9232024284,
16
+ "f": 0.922292559
17
  },
18
  "det": {
19
+ "p": 0.9907293964,
20
+ "r": 0.9891135937,
21
+ "f": 0.9899208357
22
  },
23
  "pobj": {
24
+ "p": 0.9835255354,
25
+ "r": 0.984645592,
26
+ "f": 0.984085245
27
  },
28
  "nsubj": {
29
+ "p": 0.9810607725,
30
+ "r": 0.9781380066,
31
+ "f": 0.9795972094
32
  },
33
  "aux": {
34
+ "p": 0.9886879843,
35
+ "r": 0.9881598861,
36
+ "f": 0.9884238646
37
  },
38
  "advmod": {
39
+ "p": 0.8934343434,
40
+ "r": 0.8929833417,
41
+ "f": 0.8932087857
42
  },
43
  "relcl": {
44
+ "p": 0.8716783217,
45
+ "r": 0.9045718433,
46
+ "f": 0.8878205128
47
  },
48
  "root": {
49
+ "p": 0.9662097073,
50
+ "r": 0.8601490864,
51
+ "f": 0.9100998115
52
  },
53
  "xcomp": {
54
+ "p": 0.9495982469,
55
+ "r": 0.9332376167,
56
+ "f": 0.9413468501
57
  },
58
  "amod": {
59
+ "p": 0.945387166,
60
+ "r": 0.942079689,
61
+ "f": 0.9437305296
62
  },
63
  "compound": {
64
+ "p": 0.9498658618,
65
+ "r": 0.9464802851,
66
+ "f": 0.9481700513
67
  },
68
  "poss": {
69
+ "p": 0.9851375778,
70
+ "r": 0.9873188406,
71
+ "f": 0.9862270031
72
  },
73
  "ccomp": {
74
+ "p": 0.8387701537,
75
+ "r": 0.9112016293,
76
+ "f": 0.8734869192
77
  },
78
  "attr": {
79
+ "p": 0.9384928717,
80
+ "r": 0.968881413,
81
+ "f": 0.9534450652
82
  },
83
  "case": {
84
+ "p": 0.9860904123,
85
+ "r": 0.9934934935,
86
+ "f": 0.9897781102
87
  },
88
  "mark": {
89
+ "p": 0.9469537815,
90
+ "r": 0.9554848967,
91
+ "f": 0.951200211
92
  },
93
  "intj": {
94
+ "p": 0.5938967136,
95
+ "r": 0.7413919414,
96
+ "f": 0.6594982079
97
  },
98
  "advcl": {
99
+ "p": 0.7986680328,
100
+ "r": 0.7851926467,
101
+ "f": 0.7918730159
102
  },
103
  "cc": {
104
+ "p": 0.8966257303,
105
+ "r": 0.8994139457,
106
+ "f": 0.8980176738
107
  },
108
  "neg": {
109
+ "p": 0.9603214465,
110
+ "r": 0.9593577521,
111
+ "f": 0.9598393574
112
  },
113
  "conj": {
114
+ "p": 0.8554216867,
115
+ "r": 0.9116314199,
116
+ "f": 0.8826325411
117
  },
118
  "nsubjpass": {
119
+ "p": 0.9484588176,
120
+ "r": 0.9625641026,
121
+ "f": 0.9554594044
122
  },
123
  "auxpass": {
124
+ "p": 0.9582407819,
125
+ "r": 0.9826879271,
126
+ "f": 0.9703103914
127
  },
128
  "dobj": {
129
+ "p": 0.9704970179,
130
+ "r": 0.9725077695,
131
+ "f": 0.9715013533
132
  },
133
  "nummod": {
134
+ "p": 0.9509357613,
135
+ "r": 0.9494949495,
136
+ "f": 0.9502148092
137
  },
138
  "npadvmod": {
139
+ "p": 0.8354978355,
140
+ "r": 0.8227353464,
141
+ "f": 0.8290674781
142
  },
143
  "prt": {
144
+ "p": 0.8928884987,
145
+ "r": 0.9112903226,
146
+ "f": 0.9019955654
147
  },
148
  "pcomp": {
149
+ "p": 0.9309372798,
150
+ "r": 0.925070028,
151
+ "f": 0.92799438
152
  },
153
  "expl": {
154
+ "p": 0.9914712154,
155
  "r": 0.9957173448,
156
+ "f": 0.9935897436
157
  },
158
  "acl": {
159
+ "p": 0.8481012658,
160
+ "r": 0.8406983088,
161
+ "f": 0.8443835616
162
  },
163
  "agent": {
164
+ "p": 0.9577464789,
165
+ "r": 0.9749103943,
166
+ "f": 0.9662522202
167
  },
168
  "dative": {
169
+ "p": 0.8218262806,
170
+ "r": 0.8463302752,
171
+ "f": 0.8338983051
172
  },
173
  "acomp": {
174
+ "p": 0.9484440316,
175
+ "r": 0.9260770975,
176
+ "f": 0.9371271225
177
  },
178
  "dep": {
179
+ "p": 0.4476744186,
180
+ "r": 0.375,
181
+ "f": 0.4081272085
182
  },
183
  "csubj": {
184
+ "p": 0.9,
185
  "r": 0.9053254438,
186
+ "f": 0.9026548673
187
  },
188
  "quantmod": {
189
+ "p": 0.8608624898,
190
+ "r": 0.8594638505,
191
+ "f": 0.8601626016
192
  },
193
  "nmod": {
194
+ "p": 0.818815331,
195
+ "r": 0.7160268129,
196
+ "f": 0.7639791938
197
  },
198
  "appos": {
199
+ "p": 0.781092437,
200
+ "r": 0.8065075922,
201
+ "f": 0.7935965848
202
  },
203
  "predet": {
204
+ "p": 0.8464566929,
205
+ "r": 0.9227467811,
206
+ "f": 0.8829568789
207
  },
208
  "preconj": {
209
+ "p": 0.6703296703,
210
+ "r": 0.7093023256,
211
+ "f": 0.6892655367
212
  },
213
  "oprd": {
214
+ "p": 0.8711656442,
215
+ "r": 0.847761194,
216
+ "f": 0.8593040847
217
  },
218
  "parataxis": {
219
+ "p": 0.5409836066,
220
+ "r": 0.6442516269,
221
+ "f": 0.5881188119
222
  },
223
  "meta": {
224
+ "p": 0.3063063063,
225
  "r": 0.6538461538,
226
+ "f": 0.4171779141
227
  },
228
  "csubjpass": {
229
+ "p": 1.0,
230
  "r": 0.8333333333,
231
+ "f": 0.9090909091
232
  }
233
  },
234
+ "ents_p": 0.8994886705,
235
+ "ents_r": 0.8985877404,
236
+ "ents_f": 0.8990379798,
237
  "ents_per_type": {
238
  "DATE": {
239
+ "p": 0.8913798548,
240
+ "r": 0.8961904762,
241
+ "f": 0.8937786924
242
  },
243
  "GPE": {
244
+ "p": 0.9586402266,
245
+ "r": 0.9439330544,
246
+ "f": 0.9512297962
247
  },
248
  "ORDINAL": {
249
+ "p": 0.8011869436,
250
+ "r": 0.8385093168,
251
+ "f": 0.8194233687
252
  },
253
  "ORG": {
254
+ "p": 0.9049542272,
255
+ "r": 0.8910392365,
256
+ "f": 0.8979428266
257
  },
258
  "FAC": {
259
+ "p": 0.6049382716,
260
+ "r": 0.7538461538,
261
+ "f": 0.6712328767
262
  },
263
  "QUANTITY": {
264
+ "p": 0.7831325301,
265
+ "r": 0.7142857143,
266
+ "f": 0.7471264368
267
  },
268
  "LOC": {
269
+ "p": 0.8184615385,
270
+ "r": 0.847133758,
271
+ "f": 0.8325508607
272
  },
273
  "CARDINAL": {
274
+ "p": 0.8476517755,
275
+ "r": 0.8799048751,
276
+ "f": 0.8634772462
277
  },
278
  "PERSON": {
279
+ "p": 0.9409898477,
280
+ "r": 0.9680156658,
281
+ "f": 0.9543114543
282
  },
283
  "NORP": {
284
+ "p": 0.9311183144,
285
+ "r": 0.9192,
286
+ "f": 0.9251207729
287
  },
288
  "LAW": {
289
+ "p": 0.527027027,
290
  "r": 0.609375,
291
+ "f": 0.5652173913
292
  },
293
  "TIME": {
294
+ "p": 0.7458100559,
295
+ "r": 0.7807017544,
296
+ "f": 0.7628571429
297
  },
298
  "EVENT": {
299
+ "p": 0.7407407407,
300
+ "r": 0.5747126437,
301
+ "f": 0.6472491909
302
+ },
303
+ "PRODUCT": {
304
+ "p": 0.6310160428,
305
+ "r": 0.5592417062,
306
+ "f": 0.5929648241
307
  },
308
  "WORK_OF_ART": {
309
+ "p": 0.6534090909,
310
+ "r": 0.5927835052,
311
+ "f": 0.6216216216
312
  },
313
+ "MONEY": {
314
+ "p": 0.9356796117,
315
+ "r": 0.9102715466,
316
+ "f": 0.9228007181
317
  },
318
+ "PERCENT": {
319
+ "p": 0.9146919431,
320
+ "r": 0.886676876,
321
+ "f": 0.900466563
322
  },
323
  "LANGUAGE": {
324
+ "p": 0.9642857143,
325
+ "r": 0.84375,
326
+ "f": 0.9
327
  }
328
  },
329
+ "speed": 3882.7148648089
330
  }
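The README metrics table is just these raw `accuracy.json` scores rounded to percentages. A small sketch of that mapping (assuming the file is read from the package directory; the path is an assumption):

```python
# Sketch: map the raw accuracy.json scores to the rounded percentages
# shown in the README table above.
import json

with open("accuracy.json") as f:
    scores = json.load(f)

for key in ("tag_acc", "dep_uas", "dep_las", "sents_f", "ents_f"):
    print(f"{key.upper()}: {scores[key] * 100:.2f}")
# e.g. TAG_ACC: 98.16, DEP_UAS: 95.05, ENTS_F: 89.90
```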
attribute_ruler/patterns CHANGED
Binary files a/attribute_ruler/patterns and b/attribute_ruler/patterns differ
config.cfg CHANGED
@@ -17,6 +17,7 @@ after_creation = null
 after_pipeline_creation = null
 batch_size = 64
 tokenizer = {"@tokenizers":"spacy.Tokenizer.v1"}
+vectors = {"@vectors":"spacy.Vectors.v1"}
 
 [components]
 
@@ -49,10 +50,11 @@ use_upper = false
 nO = null
 
 [components.ner.model.tok2vec]
-@architectures = "spacy-transformers.TransformerListener.v1"
-grad_factor = 1.0
+@architectures = "spacy-curated-transformers.LastTransformerLayerListener.v1"
+width = ${components.transformer.model.hidden_width}
 upstream = "transformer"
 pooling = {"@layers":"reduce_mean.v1"}
+grad_factor = 1.0
 
 [components.parser]
 factory = "parser"
@@ -72,10 +74,11 @@ use_upper = false
 nO = null
 
 [components.parser.model.tok2vec]
-@architectures = "spacy-transformers.TransformerListener.v1"
-grad_factor = 1.0
+@architectures = "spacy-curated-transformers.LastTransformerLayerListener.v1"
+width = ${components.transformer.model.hidden_width}
 upstream = "transformer"
 pooling = {"@layers":"reduce_mean.v1"}
+grad_factor = 1.0
 
 [components.tagger]
 factory = "tagger"
@@ -90,32 +93,44 @@ nO = null
 normalize = false
 
 [components.tagger.model.tok2vec]
-@architectures = "spacy-transformers.TransformerListener.v1"
-grad_factor = 1.0
+@architectures = "spacy-curated-transformers.LastTransformerLayerListener.v1"
+width = ${components.transformer.model.hidden_width}
 upstream = "transformer"
 pooling = {"@layers":"reduce_mean.v1"}
+grad_factor = 1.0
 
 [components.transformer]
-factory = "transformer"
-max_batch_items = 4096
-set_extra_annotations = {"@annotation_setters":"spacy-transformers.null_annotation_setter.v1"}
+factory = "curated_transformer"
+all_layer_outputs = false
+frozen = false
 
 [components.transformer.model]
-name = "roberta-base"
-@architectures = "spacy-transformers.TransformerModel.v3"
+@architectures = "spacy-curated-transformers.RobertaTransformer.v1"
+vocab_size = 50265
+hidden_width = 768
+piece_encoder = {"@architectures":"spacy-curated-transformers.ByteBpeEncoder.v1"}
+attention_probs_dropout_prob = 0.1
+hidden_act = "gelu"
+hidden_dropout_prob = 0.1
+intermediate_width = 3072
+layer_norm_eps = 0.00001
+max_position_embeddings = 514
+model_max_length = 512
+num_attention_heads = 12
+num_hidden_layers = 12
+padding_idx = 1
+type_vocab_size = 1
+torchscript = false
 mixed_precision = false
-
-[components.transformer.model.get_spans]
-@span_getters = "spacy-transformers.strided_spans.v1"
-window = 128
-stride = 96
+wrapped_listener = null
 
 [components.transformer.model.grad_scaler_config]
 
-[components.transformer.model.tokenizer_config]
-use_fast = true
-
-[components.transformer.model.transformer_config]
+[components.transformer.model.with_spans]
+@architectures = "spacy-curated-transformers.WithStridedSpans.v1"
+stride = 104
+window = 144
+batch_size = 384
 
 [corpora]
 
@@ -152,11 +167,11 @@ annotating_components = []
 before_update = null
 
 [training.batcher]
-@batchers = "spacy.batch_by_padded.v1"
-discard_oversize = true
-get_length = null
+@batchers = "spacy.batch_by_words.v1"
+discard_oversize = false
 size = 2000
-buffer = 256
+tolerance = 0.2
+get_length = null
 
 [training.logger]
 @loggers = "spacy.ConsoleLogger.v1"
@@ -225,6 +240,18 @@ require = false
 path = "corpus/labels/tagger.json"
 require = false
 
+[initialize.components.transformer]
+
+[initialize.components.transformer.encoder_loader]
+@model_loaders = "spacy-curated-transformers.HFTransformerEncoderLoader.v1"
+name = "roberta-base"
+revision = "main"
+
+[initialize.components.transformer.piecer_loader]
+@model_loaders = "spacy-curated-transformers.HFPieceEncoderLoader.v1"
+name = "roberta-base"
+revision = "main"
+
 [initialize.lookups]
 @misc = "spacy.LookupsDataLoader.v1"
 lang = ${nlp.lang}
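The practical effect of this config change is that the transformer component now runs on `spacy-curated-transformers` (curated_transformer factory, RobertaTransformer architecture, strided spans of window 144 / stride 104) instead of `spacy-transformers`. A small sketch, assuming the 3.7.2 package from this commit is installed, to confirm the new setup from the resolved config at runtime:

```python
# Sketch: inspect the loaded pipeline's config to confirm the
# spacy-curated-transformers setup shown in the config.cfg diff above.
import spacy

nlp = spacy.load("en_core_web_trf")
transformer_cfg = nlp.config["components"]["transformer"]

print(transformer_cfg["factory"])                  # curated_transformer
print(transformer_cfg["model"]["@architectures"])  # spacy-curated-transformers.RobertaTransformer.v1
print(transformer_cfg["model"]["with_spans"]["stride"],
      transformer_cfg["model"]["with_spans"]["window"])  # 104 144
```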
en_core_web_trf-any-py3-none-any.whl CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f9a1f0ae83954c49ebf92c0f7043ea02de51db59bf6cd3ed849d46928612c806
-size 460293616
+oid sha256:dd56d80640411c2e81c3b0cfcb6068156a459c259ae11a4ea51c625c751e3e0a
+size 457388473
meta.json CHANGED
@@ -1,14 +1,14 @@
1
  {
2
  "lang":"en",
3
  "name":"core_web_trf",
4
- "version":"3.6.1",
5
- "description":"English transformer pipeline (roberta-base). Components: transformer, tagger, parser, ner, attribute_ruler, lemmatizer.",
6
  "author":"Explosion",
7
  "email":"contact@explosion.ai",
8
  "url":"https://explosion.ai",
9
  "license":"MIT",
10
- "spacy_version":">=3.6.0,<3.7.0",
11
- "spacy_git_version":"c067b5264",
12
  "vectors":{
13
  "width":0,
14
  "vectors":0,
@@ -168,330 +168,330 @@
168
  "token_p":0.9956819193,
169
  "token_r":0.9957659295,
170
  "token_f":0.9957239226,
171
- "tag_acc":0.9780283894,
172
- "sents_p":0.9491799662,
173
- "sents_r":0.8513754205,
174
- "sents_f":0.897621366,
175
- "dep_uas":0.9514229828,
176
- "dep_las":0.9379149652,
177
  "dep_las_per_type":{
178
  "prep":{
179
- "p":0.9221881274,
180
- "r":0.92365775,
181
- "f":0.9229223537
182
  },
183
  "det":{
184
- "p":0.9891605542,
185
- "r":0.9897251896,
186
- "f":0.9894427913
187
  },
188
  "pobj":{
189
- "p":0.982956549,
190
- "r":0.9851953662,
191
- "f":0.9840746842
192
  },
193
  "nsubj":{
194
- "p":0.9779701023,
195
- "r":0.9802409639,
196
- "f":0.9791042164
197
  },
198
  "aux":{
199
- "p":0.9882541377,
200
- "r":0.9886940265,
201
- "f":0.9884740332
202
  },
203
  "advmod":{
204
- "p":0.8966186019,
205
- "r":0.894581861,
206
- "f":0.8955990735
207
  },
208
  "relcl":{
209
- "p":0.8778409091,
210
- "r":0.8969521045,
211
- "f":0.8872936109
212
  },
213
  "root":{
214
- "p":0.9657277341,
215
- "r":0.8662180883,
216
- "f":0.913270274
217
  },
218
  "xcomp":{
219
- "p":0.9401586157,
220
- "r":0.936109117,
221
- "f":0.9381294964
222
  },
223
  "amod":{
224
- "p":0.9486826583,
225
- "r":0.9378036929,
226
- "f":0.9432118073
227
  },
228
  "compound":{
229
- "p":0.9488718462,
230
- "r":0.9508799287,
231
- "f":0.9498748261
232
  },
233
  "poss":{
234
- "p":0.9859550562,
235
- "r":0.9891304348,
236
- "f":0.9875401929
237
  },
238
  "ccomp":{
239
- "p":0.8469407085,
240
- "r":0.9105906314,
241
- "f":0.8776131122
242
  },
243
  "attr":{
244
- "p":0.9512396694,
245
- "r":0.9680403701,
246
- "f":0.959566486
247
  },
248
  "case":{
249
- "p":0.988065639,
250
- "r":0.9944944945,
251
- "f":0.9912696433
252
  },
253
  "mark":{
254
- "p":0.9465128474,
255
- "r":0.9565447801,
256
- "f":0.9515023722
257
  },
258
  "intj":{
259
- "p":0.6107055961,
260
- "r":0.7355311355,
261
- "f":0.6673313393
262
  },
263
  "advcl":{
264
- "p":0.8050394502,
265
- "r":0.7965248048,
266
- "f":0.8007594937
267
  },
268
  "cc":{
269
- "p":0.8882429707,
270
- "r":0.8954670494,
271
- "f":0.8918403812
272
  },
273
  "neg":{
274
- "p":0.962406015,
275
- "r":0.9633718013,
276
- "f":0.962888666
277
  },
278
  "conj":{
279
- "p":0.8574309097,
280
- "r":0.9099949648,
281
- "f":0.8829312977
282
  },
283
  "nsubjpass":{
284
- "p":0.9603547209,
285
- "r":0.9441025641,
286
- "f":0.9521592966
287
  },
288
  "auxpass":{
289
- "p":0.9705611776,
290
- "r":0.9612756264,
291
- "f":0.9658960861
292
  },
293
  "dobj":{
294
- "p":0.9741234726,
295
- "r":0.9719499562,
296
- "f":0.9730355006
297
  },
298
  "nummod":{
299
- "p":0.9544419134,
300
- "r":0.9522727273,
301
- "f":0.9533560865
302
  },
303
  "npadvmod":{
304
- "p":0.8556701031,
305
- "r":0.8255772647,
306
- "f":0.8403543663
307
  },
308
  "prt":{
309
- "p":0.8996415771,
310
- "r":0.8996415771,
311
- "f":0.8996415771
312
  },
313
  "pcomp":{
314
- "p":0.9461979914,
315
- "r":0.9236694678,
316
- "f":0.9347980156
317
  },
318
  "expl":{
319
- "p":0.9893617021,
320
  "r":0.9957173448,
321
- "f":0.992529349
322
  },
323
  "acl":{
324
- "p":0.8503893215,
325
- "r":0.8341516639,
326
- "f":0.8421922335
327
  },
328
  "agent":{
329
- "p":0.9573712256,
330
- "r":0.9659498208,
331
- "f":0.9616413916
332
  },
333
  "dative":{
334
- "p":0.753875969,
335
- "r":0.8922018349,
336
- "f":0.8172268908
337
  },
338
  "acomp":{
339
- "p":0.9285395266,
340
- "r":0.9428571429,
341
- "f":0.9356435644
342
  },
343
  "dep":{
344
- "p":0.4055829228,
345
- "r":0.400974026,
346
- "f":0.4032653061
347
  },
348
  "csubj":{
349
- "p":0.8742857143,
350
  "r":0.9053254438,
351
- "f":0.8895348837
352
  },
353
  "quantmod":{
354
- "p":0.8656838657,
355
- "r":0.8586515028,
356
- "f":0.8621533442
357
  },
358
  "nmod":{
359
- "p":0.8400566171,
360
- "r":0.7233394272,
361
- "f":0.7773411919
362
  },
363
  "appos":{
364
- "p":0.8196095076,
365
- "r":0.8377440347,
366
- "f":0.8285775585
367
  },
368
  "predet":{
369
- "p":0.8675213675,
370
- "r":0.8712446352,
371
- "f":0.869379015
372
  },
373
  "preconj":{
374
- "p":0.66,
375
- "r":0.7674418605,
376
- "f":0.7096774194
377
  },
378
  "oprd":{
379
- "p":0.8832807571,
380
- "r":0.8358208955,
381
- "f":0.8588957055
382
  },
383
  "parataxis":{
384
- "p":0.605313093,
385
- "r":0.6919739696,
386
- "f":0.6457489879
387
  },
388
  "meta":{
389
- "p":0.2361111111,
390
  "r":0.6538461538,
391
- "f":0.3469387755
392
  },
393
  "csubjpass":{
394
- "p":0.8333333333,
395
  "r":0.8333333333,
396
- "f":0.8333333333
397
  }
398
  },
399
- "ents_p":0.8998444478,
400
- "ents_r":0.8980869391,
401
- "ents_f":0.8989648344,
402
  "ents_per_type":{
403
  "DATE":{
404
- "p":0.8931977114,
405
- "r":0.8920634921,
406
- "f":0.8926302414
407
  },
408
  "GPE":{
409
- "p":0.9540487532,
410
- "r":0.949790795,
411
- "f":0.9519150126
412
  },
413
  "ORDINAL":{
414
- "p":0.8063492063,
415
- "r":0.7888198758,
416
- "f":0.7974882261
417
  },
418
  "ORG":{
419
- "p":0.901656868,
420
- "r":0.894485684,
421
- "f":0.8980569603
422
  },
423
  "FAC":{
424
- "p":0.5964912281,
425
- "r":0.7846153846,
426
- "f":0.6777408638
427
  },
428
  "QUANTITY":{
429
- "p":0.773255814,
430
- "r":0.7307692308,
431
- "f":0.7514124294
432
  },
433
  "LOC":{
434
- "p":0.8446601942,
435
- "r":0.8312101911,
436
- "f":0.8378812199
437
  },
438
  "CARDINAL":{
439
- "p":0.857225769,
440
- "r":0.8781212842,
441
- "f":0.8675477239
442
  },
443
  "PERSON":{
444
- "p":0.9384712972,
445
- "r":0.9657310705,
446
- "f":0.951906064
447
  },
448
  "NORP":{
449
- "p":0.9270998415,
450
- "r":0.936,
451
- "f":0.9315286624
452
  },
453
  "LAW":{
454
- "p":0.609375,
455
  "r":0.609375,
456
- "f":0.609375
457
- },
458
- "MONEY":{
459
- "p":0.9325153374,
460
- "r":0.8972845336,
461
- "f":0.9145607702
462
  },
463
  "TIME":{
464
- "p":0.756302521,
465
- "r":0.7894736842,
466
- "f":0.7725321888
467
  },
468
  "EVENT":{
469
- "p":0.7537313433,
470
- "r":0.5804597701,
471
- "f":0.6558441558
472
  },
473
  "WORK_OF_ART":{
474
- "p":0.5767195767,
475
- "r":0.5618556701,
476
- "f":0.5691906005
477
  },
478
- "PERCENT":{
479
- "p":0.9174603175,
480
- "r":0.8851454824,
481
- "f":0.9010132502
482
  },
483
- "PRODUCT":{
484
- "p":0.7012987013,
485
- "r":0.5118483412,
486
- "f":0.5917808219
487
  },
488
  "LANGUAGE":{
489
- "p":0.9583333333,
490
- "r":0.71875,
491
- "f":0.8214285714
492
  }
493
  },
494
- "speed":4706.6408562715
495
  },
496
  "sources":[
497
  {
@@ -520,6 +520,6 @@
520
  }
521
  ],
522
  "requirements":[
523
- "spacy-transformers>=1.2.2,<1.3.0"
524
  ]
525
  }
1
  {
2
  "lang":"en",
3
  "name":"core_web_trf",
4
+ "version":"3.7.2",
5
+ "description":"English transformer pipeline (Transformer(name='roberta-base', piece_encoder='byte-bpe', stride=104, type='roberta', width=768, window=144, vocab_size=50265)). Components: transformer, tagger, parser, ner, attribute_ruler, lemmatizer.",
6
  "author":"Explosion",
7
  "email":"contact@explosion.ai",
8
  "url":"https://explosion.ai",
9
  "license":"MIT",
10
+ "spacy_version":">=3.7.0,<3.8.0",
11
+ "spacy_git_version":"6b4f77441",
12
  "vectors":{
13
  "width":0,
14
  "vectors":0,
168
  "token_p":0.9956819193,
169
  "token_r":0.9957659295,
170
  "token_f":0.9957239226,
171
+ "tag_acc":0.9815755741,
172
+ "sents_p":0.9428677288,
173
+ "sents_r":0.8393693515,
174
+ "sents_f":0.8881133524,
175
+ "dep_uas":0.9505463996,
176
+ "dep_las":0.9368643771,
177
  "dep_las_per_type":{
178
  "prep":{
179
+ "p":0.9213844814,
180
+ "r":0.9232024284,
181
+ "f":0.922292559
182
  },
183
  "det":{
184
+ "p":0.9907293964,
185
+ "r":0.9891135937,
186
+ "f":0.9899208357
187
  },
188
  "pobj":{
189
+ "p":0.9835255354,
190
+ "r":0.984645592,
191
+ "f":0.984085245
192
  },
193
  "nsubj":{
194
+ "p":0.9810607725,
195
+ "r":0.9781380066,
196
+ "f":0.9795972094
197
  },
198
  "aux":{
199
+ "p":0.9886879843,
200
+ "r":0.9881598861,
201
+ "f":0.9884238646
202
  },
203
  "advmod":{
204
+ "p":0.8934343434,
205
+ "r":0.8929833417,
206
+ "f":0.8932087857
207
  },
208
  "relcl":{
209
+ "p":0.8716783217,
210
+ "r":0.9045718433,
211
+ "f":0.8878205128
212
  },
213
  "root":{
214
+ "p":0.9662097073,
215
+ "r":0.8601490864,
216
+ "f":0.9100998115
217
  },
218
  "xcomp":{
219
+ "p":0.9495982469,
220
+ "r":0.9332376167,
221
+ "f":0.9413468501
222
  },
223
  "amod":{
224
+ "p":0.945387166,
225
+ "r":0.942079689,
226
+ "f":0.9437305296
227
  },
228
  "compound":{
229
+ "p":0.9498658618,
230
+ "r":0.9464802851,
231
+ "f":0.9481700513
232
  },
233
  "poss":{
234
+ "p":0.9851375778,
235
+ "r":0.9873188406,
236
+ "f":0.9862270031
237
  },
238
  "ccomp":{
239
+ "p":0.8387701537,
240
+ "r":0.9112016293,
241
+ "f":0.8734869192
242
  },
243
  "attr":{
244
+ "p":0.9384928717,
245
+ "r":0.968881413,
246
+ "f":0.9534450652
247
  },
248
  "case":{
249
+ "p":0.9860904123,
250
+ "r":0.9934934935,
251
+ "f":0.9897781102
252
  },
253
  "mark":{
254
+ "p":0.9469537815,
255
+ "r":0.9554848967,
256
+ "f":0.951200211
257
  },
258
  "intj":{
259
+ "p":0.5938967136,
260
+ "r":0.7413919414,
261
+ "f":0.6594982079
262
  },
263
  "advcl":{
264
+ "p":0.7986680328,
265
+ "r":0.7851926467,
266
+ "f":0.7918730159
267
  },
268
  "cc":{
269
+ "p":0.8966257303,
270
+ "r":0.8994139457,
271
+ "f":0.8980176738
272
  },
273
  "neg":{
274
+ "p":0.9603214465,
275
+ "r":0.9593577521,
276
+ "f":0.9598393574
277
  },
278
  "conj":{
279
+ "p":0.8554216867,
280
+ "r":0.9116314199,
281
+ "f":0.8826325411
282
  },
283
  "nsubjpass":{
284
+ "p":0.9484588176,
285
+ "r":0.9625641026,
286
+ "f":0.9554594044
287
  },
288
  "auxpass":{
289
+ "p":0.9582407819,
290
+ "r":0.9826879271,
291
+ "f":0.9703103914
292
  },
293
  "dobj":{
294
+ "p":0.9704970179,
295
+ "r":0.9725077695,
296
+ "f":0.9715013533
297
  },
298
  "nummod":{
299
+ "p":0.9509357613,
300
+ "r":0.9494949495,
301
+ "f":0.9502148092
302
  },
303
  "npadvmod":{
304
+ "p":0.8354978355,
305
+ "r":0.8227353464,
306
+ "f":0.8290674781
307
  },
308
  "prt":{
309
+ "p":0.8928884987,
310
+ "r":0.9112903226,
311
+ "f":0.9019955654
312
  },
313
  "pcomp":{
314
+ "p":0.9309372798,
315
+ "r":0.925070028,
316
+ "f":0.92799438
317
  },
318
  "expl":{
319
+ "p":0.9914712154,
320
  "r":0.9957173448,
321
+ "f":0.9935897436
322
  },
323
  "acl":{
324
+ "p":0.8481012658,
325
+ "r":0.8406983088,
326
+ "f":0.8443835616
327
  },
328
  "agent":{
329
+ "p":0.9577464789,
330
+ "r":0.9749103943,
331
+ "f":0.9662522202
332
  },
333
  "dative":{
334
+ "p":0.8218262806,
335
+ "r":0.8463302752,
336
+ "f":0.8338983051
337
  },
338
  "acomp":{
339
+ "p":0.9484440316,
340
+ "r":0.9260770975,
341
+ "f":0.9371271225
342
  },
343
  "dep":{
344
+ "p":0.4476744186,
345
+ "r":0.375,
346
+ "f":0.4081272085
347
  },
348
  "csubj":{
349
+ "p":0.9,
350
  "r":0.9053254438,
351
+ "f":0.9026548673
352
  },
353
  "quantmod":{
354
+ "p":0.8608624898,
355
+ "r":0.8594638505,
356
+ "f":0.8601626016
357
  },
358
  "nmod":{
359
+ "p":0.818815331,
360
+ "r":0.7160268129,
361
+ "f":0.7639791938
362
  },
363
  "appos":{
364
+ "p":0.781092437,
365
+ "r":0.8065075922,
366
+ "f":0.7935965848
367
  },
368
  "predet":{
369
+ "p":0.8464566929,
370
+ "r":0.9227467811,
371
+ "f":0.8829568789
372
  },
373
  "preconj":{
374
+ "p":0.6703296703,
375
+ "r":0.7093023256,
376
+ "f":0.6892655367
377
  },
378
  "oprd":{
379
+ "p":0.8711656442,
380
+ "r":0.847761194,
381
+ "f":0.8593040847
382
  },
383
  "parataxis":{
384
+ "p":0.5409836066,
385
+ "r":0.6442516269,
386
+ "f":0.5881188119
387
  },
388
  "meta":{
389
+ "p":0.3063063063,
390
  "r":0.6538461538,
391
+ "f":0.4171779141
392
  },
393
  "csubjpass":{
394
+ "p":1.0,
395
  "r":0.8333333333,
396
+ "f":0.9090909091
397
  }
398
  },
399
+ "ents_p":0.8994886705,
400
+ "ents_r":0.8985877404,
401
+ "ents_f":0.8990379798,
402
  "ents_per_type":{
403
  "DATE":{
404
+ "p":0.8913798548,
405
+ "r":0.8961904762,
406
+ "f":0.8937786924
407
  },
408
  "GPE":{
409
+ "p":0.9586402266,
410
+ "r":0.9439330544,
411
+ "f":0.9512297962
412
  },
413
  "ORDINAL":{
414
+ "p":0.8011869436,
415
+ "r":0.8385093168,
416
+ "f":0.8194233687
417
  },
418
  "ORG":{
419
+ "p":0.9049542272,
420
+ "r":0.8910392365,
421
+ "f":0.8979428266
422
  },
423
  "FAC":{
424
+ "p":0.6049382716,
425
+ "r":0.7538461538,
426
+ "f":0.6712328767
427
  },
428
  "QUANTITY":{
429
+ "p":0.7831325301,
430
+ "r":0.7142857143,
431
+ "f":0.7471264368
432
  },
433
  "LOC":{
434
+ "p":0.8184615385,
435
+ "r":0.847133758,
436
+ "f":0.8325508607
437
  },
438
  "CARDINAL":{
439
+ "p":0.8476517755,
440
+ "r":0.8799048751,
441
+ "f":0.8634772462
442
  },
443
  "PERSON":{
444
+ "p":0.9409898477,
445
+ "r":0.9680156658,
446
+ "f":0.9543114543
447
  },
448
  "NORP":{
449
+ "p":0.9311183144,
450
+ "r":0.9192,
451
+ "f":0.9251207729
452
  },
453
  "LAW":{
454
+ "p":0.527027027,
455
  "r":0.609375,
456
+ "f":0.5652173913
457
  },
458
  "TIME":{
459
+ "p":0.7458100559,
460
+ "r":0.7807017544,
461
+ "f":0.7628571429
462
  },
463
  "EVENT":{
464
+ "p":0.7407407407,
465
+ "r":0.5747126437,
466
+ "f":0.6472491909
467
+ },
468
+ "PRODUCT":{
469
+ "p":0.6310160428,
470
+ "r":0.5592417062,
471
+ "f":0.5929648241
472
  },
473
  "WORK_OF_ART":{
474
+ "p":0.6534090909,
475
+ "r":0.5927835052,
476
+ "f":0.6216216216
477
  },
478
+ "MONEY":{
479
+ "p":0.9356796117,
480
+ "r":0.9102715466,
481
+ "f":0.9228007181
482
  },
483
+ "PERCENT":{
484
+ "p":0.9146919431,
485
+ "r":0.886676876,
486
+ "f":0.900466563
487
  },
488
  "LANGUAGE":{
489
+ "p":0.9642857143,
490
+ "r":0.84375,
491
+ "f":0.9
492
  }
493
  },
494
+ "speed":3882.7148648089
495
  },
496
  "sources":[
497
  {
520
  }
521
  ],
522
  "requirements":[
523
+ "spacy-curated-transformers>=0.2.0,<0.3.0"
524
  ]
525
  }
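The version, compatibility range, and requirements changed in meta.json above are also what the installed package reports at runtime; a quick sketch (assuming the updated package is installed):

```python
# Sketch: the meta.json fields changed in this commit, as exposed via nlp.meta.
import spacy

nlp = spacy.load("en_core_web_trf")
print(nlp.meta["version"])        # 3.7.2
print(nlp.meta["spacy_version"])  # >=3.7.0,<3.8.0
print(nlp.meta["requirements"])   # ['spacy-curated-transformers>=0.2.0,<0.3.0']
```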
ner/model CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:baac59918ff3d48b195b884ead6098bacd2aaca43f7f376bb2a8fdb0238191e8
-size 313773
+oid sha256:00b5b57be1a3766ad68d11e3e6abb06653bc9d768f6a8dea3831f21158cab0fa
+size 313857
parser/model CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:427a951b3f6adfcbe5b8564780d6d339ce3267c20727baca6e1a6a4e1478655d
-size 639549
+oid sha256:252cbce9c57a485ba0074fc0d2c494f50e39e4f77ab5bb16da2d4a34b3322eb8
+size 639633
tagger/model CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e3efc19c3f9d40ea82b00ff51d536c8db1818ae7aaef72fe3a43f90d2625b1fe
-size 151366
+oid sha256:487171b7e08a8ae2e94c539f7f3af2a0f4fbe45200e580a463319044bf036920
+size 151450
transformer/cfg CHANGED
@@ -1,3 +1,3 @@
 {
-  "max_batch_items":4096
+
 }
transformer/model CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1299436478208719ccde62165434d673da6810ae2d962d902e5f6aef8898c597
-size 502027901
+oid sha256:0f25fe24482dcc5c674abbe73b723562c9d6cdd1c3c24697d78ccf5a33a43b80
+size 497342613
vocab/lookups.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1ddd140ecac6a8c4592e9146d6e30074569ffaed97ee51edc9587dc510f8934c
-size 69982
+oid sha256:fce9c883c56165f29573cc938c2a1c9d417ac61bd8f56b671dd5f7996de70682
+size 70040
vocab/strings.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5584afb04f5a45fd5496792d1c737311e1f45d752e15f7be48b3ffbb6047c8eb
-size 1091213
+oid sha256:c7728f12b29cdb3908bd5d2b5ef0efac558033e58c30246e0d2ce86fd781ed67
+size 1091498