adigo committed on
Commit ca0a0d3 · verified · 1 Parent(s): fbddc73

Training in progress, epoch 1

config.json CHANGED
@@ -1,10 +1,11 @@
 {
-  "_name_or_path": "emilyalsentzer/Bio_ClinicalBERT",
+  "_name_or_path": "bert-base-cased",
   "architectures": [
     "BertForTokenClassification"
   ],
   "attention_probs_dropout_prob": 0.1,
   "classifier_dropout": null,
+  "gradient_checkpointing": false,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d8f71c3c16f6db678bafa5961a03a762230c1b0fa1dd1bcb3027042b8f909b42
+oid sha256:330b2875947a8bb6fbf7edfcb8b32fc4f0792b9bb1f89e7645889cf11cb76851
 size 430911284
runs/Sep16_21-48-07_f72b346c1a48/events.out.tfevents.1726523311.f72b346c1a48.1491.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5fadc2f408e49589ffbd22c1383af1b18d4da185b6befc6313474bdba27bac17
+size 5732
tokenizer.json CHANGED
@@ -1,6 +1,11 @@
 {
   "version": "1.0",
-  "truncation": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 512,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
   "padding": null,
   "added_tokens": [
     {
@@ -54,7 +59,7 @@
     "clean_text": true,
     "handle_chinese_chars": true,
     "strip_accents": null,
-    "lowercase": true
+    "lowercase": false
   },
   "pre_tokenizer": {
     "type": "BertPreTokenizer"
tokenizer_config.json CHANGED
@@ -43,11 +43,9 @@
   },
   "clean_up_tokenization_spaces": true,
   "cls_token": "[CLS]",
-  "do_basic_tokenize": true,
-  "do_lower_case": true,
+  "do_lower_case": false,
   "mask_token": "[MASK]",
-  "model_max_length": 1000000000000000019884624838656,
-  "never_split": null,
+  "model_max_length": 512,
   "pad_token": "[PAD]",
   "sep_token": "[SEP]",
   "strip_accents": null,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:be373d0d9eaf565b8a517dfdd7aa4e5edf5e805140bf8d4ef53f97715c56174f
+oid sha256:1722768e6fe0d375266c9f43e27a8be3b10638703cb25ea3dbea0c8ada49d84f
 size 5176