joanllop commited on
Commit
8173542
1 Parent(s): 5fe29ca
README.md ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Basic spaCy BioNER pipeline, with a RoBERTa-based model [bsc-bio-ehr-es](https://huggingface.co/PlanTL-GOB-ES/bsc-bio-ehr-es) and a dataset, CANTEMIST, annotated with tumor morphology entities. For further information, check the [official website](https://temu.bsc.es/cantemist/). Visit our [GitHub repository](https://github.com/PlanTL-GOB-ES/lm-biomedical-clinical-es). This work was funded by the Spanish State Secretariat for Digitalization and Artificial Intelligence (SEDIA) within the framework of the Plan-TL.
2
+
3
+ | Feature | Description |
4
+ | --- | --- |
5
+ | **Name** | `es_cantemist_ner_trf` |
6
+ | **Version** | `3.4.0` |
7
+ | **spaCy** | `>=3.4.0,<3.5.0` |
8
+ | **Default Pipeline** | `transformer`, `ner` |
9
+ | **Components** | `transformer`, `ner` |
10
+ | **Vectors** | 0 keys, 0 unique vectors (0 dimensions) |
11
+ | **Sources** | n/a |
12
+ | **License** | [Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0) |
13
+ | **Author** | [The Text Mining Unit from Barcelona Supercomputing Center.](https://huggingface.co/PlanTL-GOB-ES) |
14
+
15
+ ### Label Scheme
16
+
17
+ <details>
18
+
19
+ <summary>View label scheme (1 label for 1 component)</summary>
20
+
21
+ | Component | Labels |
22
+ | --- | --- |
23
+ | **`ner`** | `MORFOLOGIA_NEOPLASIA` |
24
+
25
+ </details>
26
+
27
+ ### Accuracy
28
+
29
+ | Type | Score |
30
+ | --- | --- |
31
+ | `ENTS_F` | 84.52 |
32
+ | `ENTS_P` | 84.88 |
33
+ | `ENTS_R` | 84.16 |
34
+ | `TRANSFORMER_LOSS` | 25646.78 |
35
+ | `NER_LOSS` | 9622.84 |
config.cfg ADDED
@@ -0,0 +1,145 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [paths]
2
+ train = "corpus/train.spacy"
3
+ dev = "corpus/dev.spacy"
4
+ vectors = null
5
+ init_tok2vec = null
6
+
7
+ [system]
8
+ gpu_allocator = "pytorch"
9
+ seed = 0
10
+
11
+ [nlp]
12
+ lang = "es"
13
+ pipeline = ["transformer","ner"]
14
+ batch_size = 128
15
+ disabled = []
16
+ before_creation = null
17
+ after_creation = null
18
+ after_pipeline_creation = null
19
+ tokenizer = {"@tokenizers":"spacy.Tokenizer.v1"}
20
+
21
+ [components]
22
+
23
+ [components.ner]
24
+ factory = "ner"
25
+ incorrect_spans_key = null
26
+ moves = null
27
+ scorer = {"@scorers":"spacy.ner_scorer.v1"}
28
+ update_with_oracle_cut_size = 100
29
+
30
+ [components.ner.model]
31
+ @architectures = "spacy.TransitionBasedParser.v2"
32
+ state_type = "ner"
33
+ extra_state_tokens = false
34
+ hidden_width = 64
35
+ maxout_pieces = 2
36
+ use_upper = false
37
+ nO = null
38
+
39
+ [components.ner.model.tok2vec]
40
+ @architectures = "spacy-transformers.TransformerListener.v1"
41
+ grad_factor = 1.0
42
+ pooling = {"@layers":"reduce_mean.v1"}
43
+ upstream = "*"
44
+
45
+ [components.transformer]
46
+ factory = "transformer"
47
+ max_batch_items = 4096
48
+ set_extra_annotations = {"@annotation_setters":"spacy-transformers.null_annotation_setter.v1"}
49
+
50
+ [components.transformer.model]
51
+ @architectures = "spacy-transformers.TransformerModel.v3"
52
+ name = "PlanTL-GOB-ES/bsc-bio-ehr-es"
53
+ mixed_precision = false
54
+
55
+ [components.transformer.model.get_spans]
56
+ @span_getters = "spacy-transformers.strided_spans.v1"
57
+ window = 128
58
+ stride = 96
59
+
60
+ [components.transformer.model.grad_scaler_config]
61
+
62
+ [components.transformer.model.tokenizer_config]
63
+ use_fast = true
64
+
65
+ [components.transformer.model.transformer_config]
66
+
67
+ [corpora]
68
+
69
+ [corpora.dev]
70
+ @readers = "spacy.Corpus.v1"
71
+ path = ${paths.dev}
72
+ max_length = 0
73
+ gold_preproc = false
74
+ limit = 0
75
+ augmenter = null
76
+
77
+ [corpora.train]
78
+ @readers = "spacy.Corpus.v1"
79
+ path = ${paths.train}
80
+ max_length = 0
81
+ gold_preproc = false
82
+ limit = 0
83
+ augmenter = null
84
+
85
+ [training]
86
+ accumulate_gradient = 3
87
+ dev_corpus = "corpora.dev"
88
+ train_corpus = "corpora.train"
89
+ seed = ${system.seed}
90
+ gpu_allocator = ${system.gpu_allocator}
91
+ dropout = 0.1
92
+ patience = 1600
93
+ max_epochs = 0
94
+ max_steps = 20000
95
+ eval_frequency = 200
96
+ frozen_components = []
97
+ annotating_components = []
98
+ before_to_disk = null
99
+
100
+ [training.batcher]
101
+ @batchers = "spacy.batch_by_padded.v1"
102
+ discard_oversize = true
103
+ size = 2000
104
+ buffer = 256
105
+ get_length = null
106
+
107
+ [training.logger]
108
+ @loggers = "spacy.ConsoleLogger.v1"
109
+ progress_bar = false
110
+
111
+ [training.optimizer]
112
+ @optimizers = "Adam.v1"
113
+ beta1 = 0.9
114
+ beta2 = 0.999
115
+ L2_is_weight_decay = true
116
+ L2 = 0.01
117
+ grad_clip = 1.0
118
+ use_averages = false
119
+ eps = 0.00000001
120
+
121
+ [training.optimizer.learn_rate]
122
+ @schedules = "warmup_linear.v1"
123
+ warmup_steps = 250
124
+ total_steps = 20000
125
+ initial_rate = 0.00005
126
+
127
+ [training.score_weights]
128
+ ents_f = 1.0
129
+ ents_p = 0.0
130
+ ents_r = 0.0
131
+ ents_per_type = null
132
+
133
+ [pretraining]
134
+
135
+ [initialize]
136
+ vectors = ${paths.vectors}
137
+ init_tok2vec = ${paths.init_tok2vec}
138
+ vocab_data = null
139
+ lookups = null
140
+ before_init = null
141
+ after_init = null
142
+
143
+ [initialize.components]
144
+
145
+ [initialize.tokenizer]
es_cantemist_ner_trf-3.4.0-py3-none-any.whl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a89c8acad991e8592f109e76f974abcec7c341b48bc7a6ef5d3407fb7bfe2ab0
3
+ size 441486346
meta.json ADDED
@@ -0,0 +1,54 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "lang":"es",
3
+ "name":"cantemist_ner_trf",
4
+ "version":"3.4.0",
5
+ "description":"Basic spaCy BioNER pipeline, with a RoBERTa-based model [bsc-bio-ehr-es](https://huggingface.co/PlanTL-GOB-ES/bsc-bio-ehr-es) and a dataset, CANTEMIST, annotated with tumor morphology entities. For further information, check the [official website](https://temu.bsc.es/cantemist/). Visit our [GitHub repository](https://github.com/PlanTL-GOB-ES/lm-biomedical-clinical-es). This work was funded by the Spanish State Secretariat for Digitalization and Artificial Intelligence (SEDIA) within the framework of the Plan-TL",
6
+ "author":"The Text Mining Unit from Barcelona Supercomputing Center.",
7
+ "email":"plantl-gob-es@bsc.es",
8
+ "url":"https://huggingface.co/PlanTL-GOB-ES",
9
+ "license":"[Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0)",
10
+ "spacy_version":">=3.4.0,<3.5.0",
11
+ "spacy_git_version":"Unknown",
12
+ "vectors":{
13
+ "width":0,
14
+ "vectors":0,
15
+ "keys":0,
16
+ "name":null
17
+ },
18
+ "labels":{
19
+ "transformer":[
20
+
21
+ ],
22
+ "ner":[
23
+ "MORFOLOGIA_NEOPLASIA"
24
+ ]
25
+ },
26
+ "pipeline":[
27
+ "transformer",
28
+ "ner"
29
+ ],
30
+ "components":[
31
+ "transformer",
32
+ "ner"
33
+ ],
34
+ "disabled":[
35
+
36
+ ],
37
+ "performance":{
38
+ "ents_f":0.8451798075,
39
+ "ents_p":0.8487622923,
40
+ "ents_r":0.8416274378,
41
+ "ents_per_type":{
42
+ "MORFOLOGIA_NEOPLASIA":{
43
+ "p":0.8487622923,
44
+ "r":0.8416274378,
45
+ "f":0.8451798075
46
+ }
47
+ },
48
+ "transformer_loss":256.4677646511,
49
+ "ner_loss":96.2283862055
50
+ },
51
+ "requirements":[
52
+ "spacy-transformers>=1.1.8,<1.2.0"
53
+ ]
54
+ }
ner/cfg ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "moves":null,
3
+ "update_with_oracle_cut_size":100,
4
+ "multitasks":[
5
+
6
+ ],
7
+ "min_action_freq":1,
8
+ "learn_tokens":false,
9
+ "beam_width":1,
10
+ "beam_density":0.0,
11
+ "beam_update_prob":0.0,
12
+ "incorrect_spans_key":null
13
+ }
ner/model ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3dff10568a451ec1d3ef8a9e4ba695ab798f9a557e8fbf7a8d90e085f7717e37
3
+ size 207145
ner/moves ADDED
@@ -0,0 +1 @@
 
 
1
+ ��moves٤{"0":{},"1":{"MORFOLOGIA_NEOPLASIA":14889},"2":{"MORFOLOGIA_NEOPLASIA":14889},"3":{"MORFOLOGIA_NEOPLASIA":14889},"4":{"MORFOLOGIA_NEOPLASIA":14889,"":1},"5":{"":1}}�cfg��neg_key�
tokenizer ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c453330d14b61214d60b17c4c5a47c4acf3027c9b32c452dd3b26f66c5b28169
3
+ size 36836
transformer/cfg ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ {
2
+ "max_batch_items":4096
3
+ }
transformer/model ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e45adc31db0ea71505cf438a94ec2f6ce0c328dc2935771d46f3928a3bfedc7c
3
+ size 502280552
vocab/key2row ADDED
@@ -0,0 +1 @@
 
 
1
+
vocab/lookups.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:76be8b528d0075f7aae98d6fa57a6d3c83ae480a8469e668d7b0af968995ac71
3
+ size 1
vocab/strings.json ADDED
The diff for this file is too large to render. See raw diff
 
vocab/vectors ADDED
Binary file (128 Bytes). View file
 
vocab/vectors.cfg ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ {
2
+ "mode":"default"
3
+ }