Mojtaba aka Omid Rohanian committed
Commit 3fa9cbf
Parent: ebc0591

ClinicalDistilBERT model and tokeniser

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "clinical-models/models/ClinicalDistilBERT/final/model",
+  "_name_or_path": "/users/engs2263/clinical-models/models/ClinicalDistilBERT/final/model",
   "activation": "gelu",
   "adapters": {
     "adapters": {},
@@ -26,6 +26,6 @@
   "sinusoidal_pos_embds": false,
   "tie_weights_": true,
   "torch_dtype": "float32",
-  "transformers_version": "4.17.0",
+  "transformers_version": "4.27.0.dev0",
   "vocab_size": 28996
 }
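The two fields changed here are provenance metadata rather than architecture: "_name_or_path" records the local path the checkpoint was saved from, and "transformers_version" records the library version that serialised it, bumped from 4.17.0 to a 4.27.0 dev build (the "adapters" block suggests the checkpoint was written with the adapter-transformers fork, which plain transformers simply carries through). A minimal sketch of inspecting these fields without downloading the weights; the directory name is a placeholder for a local clone of this repository:

from transformers import AutoConfig

# Placeholder path: a local clone of this repository.
config = AutoConfig.from_pretrained("./ClinicalDistilBERT")

print(config.transformers_version)  # "4.27.0.dev0" after this commit
print(config.vocab_size)            # 28996, the bert-base-cased vocabulary size
print(config.activation)            # "gelu"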
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:eb1dc80bc92fa1693723ff05a194d3e5f8700dbc66758a100be42ff7ec0fda90
-size 263292335
+oid sha256:fda1b5c485698ea0792e89873108ec3e0f51d0dd00780a8c9f7ff6646029a1e5
+size 263281391
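Both versions here are Git LFS pointer files, not the weights themselves: the repository tracks only a sha256 object id and a byte size, while the actual ~263 MB checkpoint lives in LFS storage. A minimal sketch of verifying a downloaded pytorch_model.bin against the updated pointer; the expected digest and size are taken from this diff, everything else is illustrative:

import hashlib

# Expected values from the updated LFS pointer in this commit.
EXPECTED_OID = "fda1b5c485698ea0792e89873108ec3e0f51d0dd00780a8c9f7ff6646029a1e5"
EXPECTED_SIZE = 263281391

def verify_lfs_object(path: str) -> bool:
    """Check a downloaded file against the pointer's sha256 oid and size."""
    digest = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        # Stream in 1 MiB chunks so the checkpoint is never fully in memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
            size += len(chunk)
    return digest.hexdigest() == EXPECTED_OID and size == EXPECTED_SIZE

print(verify_lfs_object("pytorch_model.bin"))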
special_tokens_map.json CHANGED
@@ -1 +1,7 @@
-{"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
+{
+  "cls_token": "[CLS]",
+  "mask_token": "[MASK]",
+  "pad_token": "[PAD]",
+  "sep_token": "[SEP]",
+  "unk_token": "[UNK]"
+}
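Nothing changed in substance: the same five BERT special tokens, re-serialised one key per line by a newer save routine. A quick sanity check that a loaded tokeniser agrees with this map (placeholder clone path, as above):

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("./ClinicalDistilBERT")  # placeholder path
assert tokenizer.special_tokens_map == {
    "cls_token": "[CLS]",
    "mask_token": "[MASK]",
    "pad_token": "[PAD]",
    "sep_token": "[SEP]",
    "unk_token": "[UNK]",
}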
tokenizer_config.json CHANGED
@@ -1 +1,14 @@
-{"do_lower_case": false, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "bert-base-cased", "tokenizer_class": "BertTokenizer"}
+{
+  "cls_token": "[CLS]",
+  "do_lower_case": false,
+  "mask_token": "[MASK]",
+  "model_max_length": 512,
+  "name_or_path": "bert-base-cased",
+  "pad_token": "[PAD]",
+  "sep_token": "[SEP]",
+  "special_tokens_map_file": null,
+  "strip_accents": null,
+  "tokenize_chinese_chars": true,
+  "tokenizer_class": "BertTokenizer",
+  "unk_token": "[UNK]"
+}
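Also a pure re-serialisation: the tokeniser is a cased BertTokenizer ("do_lower_case": false) built from the bert-base-cased vocabulary, with a 512-token limit matching the DistilBERT encoder. A minimal end-to-end sketch loading the pair and exercising the masked-LM head; the directory name is a placeholder and the example sentence purely illustrative:

import torch
from transformers import AutoTokenizer, AutoModelForMaskedLM

# Placeholder path: a local clone of this repository (or its Hub repo id).
MODEL_DIR = "./ClinicalDistilBERT"

tokenizer = AutoTokenizer.from_pretrained(MODEL_DIR)     # cased BertTokenizer, max 512 tokens
model = AutoModelForMaskedLM.from_pretrained(MODEL_DIR)  # DistilBERT with an MLM head

# Smoke test: predict the masked token in a clinical-style sentence.
inputs = tokenizer("The patient was prescribed [MASK] for hypertension.", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, sequence_length, 28996)

mask_pos = (inputs["input_ids"] == tokenizer.mask_token_id).nonzero()[0, 1]
print(tokenizer.decode(logits[0, mask_pos].argmax()))

What the top prediction turns out to be depends on the pre-training data; the point is only that the checkpoint, config and tokeniser files in this commit load as a consistent whole.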