Files changed (2)
  1. README.md +4 -5
  2. config.json +6 -6
README.md CHANGED
@@ -26,7 +26,7 @@ The model is based on the [ClinicalBERT - Bio + Discharge Summary BERT Model](ht

  You can load the model via the transformers library:
  ```
- from transformers import AutoTokenizer, AutoModelForSequenceClassification
+ from transformers import AutoTokenizer, AutoModelForSequenceClassification, TextClassificationPipeline
  tokenizer = AutoTokenizer.from_pretrained("bvanaken/clinical-assertion-negation-bert")
  model = AutoModelForSequenceClassification.from_pretrained("bvanaken/clinical-assertion-negation-bert")

@@ -38,11 +38,10 @@ Example input and inference:
  ```
  input = "The patient recovered during the night and now denies any [entity] shortness of breath [entity]."

- tokenized_input = tokenizer(input, return_tensors="pt")
- output = model(**tokenized_input)
+ classifier = TextClassificationPipeline(model=model, tokenizer=tokenizer)

- import numpy as np
- predicted_label = np.argmax(output.logits.detach().numpy()) ## 1 == ABSENT
+ classification = classifier(input)
+ # [{'label': 'ABSENT', 'score': 0.9842607378959656}]
  ```

  ### Cite
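
Taken together, the two README hunks replace the manual logits/argmax decoding with a `TextClassificationPipeline`. A minimal end-to-end sketch assembled from the added lines (the expected output is the one quoted in the README; the `text` variable name is mine, the README itself uses `input`):

```python
from transformers import (
    AutoTokenizer,
    AutoModelForSequenceClassification,
    TextClassificationPipeline,
)

# Load the assertion/negation classifier from the Hugging Face Hub.
model_id = "bvanaken/clinical-assertion-negation-bert"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForSequenceClassification.from_pretrained(model_id)

# The target concept is wrapped in [entity] markers, as in the README example.
text = "The patient recovered during the night and now denies any [entity] shortness of breath [entity]."

# The pipeline tokenizes, runs the model, and maps the top logit to its label name.
classifier = TextClassificationPipeline(model=model, tokenizer=tokenizer)
classification = classifier(text)
print(classification)
# Expected, per the README: [{'label': 'ABSENT', 'score': 0.9842607378959656}]
```

The human-readable `ABSENT` label relies on the `id2label` mapping fixed in `config.json` below; with the old mapping the pipeline would have reported the bare index string `"1"` instead.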
config.json CHANGED
@@ -9,16 +9,16 @@
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
-   "0": "0",
-   "1": "1",
-   "2": "2"
+   "0": "PRESENT",
+   "1": "ABSENT",
+   "2": "POSSIBLE"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
-   "0": 0,
-   "1": 1,
-   "2": 2
+   "PRESENT": 0,
+   "ABSENT": 1,
+   "POSSIBLE": 2
  },
  "language": "english",
  "layer_norm_eps": 1e-12,