annafavaro committed
Commit 20ff799
1 Parent(s): 45c1471

Training in progress, epoch 1

Files changed (22)
  1. .gitignore +1 -0
  2. config.json +51 -0
  3. merges.txt +0 -0
  4. pytorch_model.bin +3 -0
  5. runs/Apr13_21-17-55_d2f6cadd9fb9/1681420781.403019/events.out.tfevents.1681420781.d2f6cadd9fb9.170.1 +3 -0
  6. runs/Apr13_21-17-55_d2f6cadd9fb9/events.out.tfevents.1681420781.d2f6cadd9fb9.170.0 +3 -0
  7. runs/Apr13_22-05-39_d2f6cadd9fb9/1681423547.570079/events.out.tfevents.1681423547.d2f6cadd9fb9.170.3 +3 -0
  8. runs/Apr13_22-05-39_d2f6cadd9fb9/events.out.tfevents.1681423547.d2f6cadd9fb9.170.2 +3 -0
  9. runs/Apr13_22-38-44_d2f6cadd9fb9/1681425548.189409/events.out.tfevents.1681425548.d2f6cadd9fb9.170.5 +3 -0
  10. runs/Apr13_22-38-44_d2f6cadd9fb9/events.out.tfevents.1681425548.d2f6cadd9fb9.170.4 +3 -0
  11. runs/Apr13_23-08-34_d2f6cadd9fb9/1681427329.0418153/events.out.tfevents.1681427329.d2f6cadd9fb9.32845.1 +3 -0
  12. runs/Apr13_23-08-34_d2f6cadd9fb9/1681427365.1016428/events.out.tfevents.1681427365.d2f6cadd9fb9.32845.3 +3 -0
  13. runs/Apr13_23-08-34_d2f6cadd9fb9/1681427395.4606037/events.out.tfevents.1681427395.d2f6cadd9fb9.32845.5 +3 -0
  14. runs/Apr13_23-08-34_d2f6cadd9fb9/events.out.tfevents.1681427329.d2f6cadd9fb9.32845.0 +3 -0
  15. runs/Apr13_23-08-34_d2f6cadd9fb9/events.out.tfevents.1681427365.d2f6cadd9fb9.32845.2 +3 -0
  16. runs/Apr13_23-08-34_d2f6cadd9fb9/events.out.tfevents.1681427395.d2f6cadd9fb9.32845.4 +3 -0
  17. runs/Apr13_23-26-15_d2f6cadd9fb9/1681428386.1686995/events.out.tfevents.1681428386.d2f6cadd9fb9.37676.1 +3 -0
  18. runs/Apr13_23-26-15_d2f6cadd9fb9/events.out.tfevents.1681428386.d2f6cadd9fb9.37676.0 +3 -0
  19. special_tokens_map.json +7 -0
  20. tokenizer_config.json +10 -0
  21. training_args.bin +3 -0
  22. vocab.json +0 -0
.gitignore ADDED
@@ -0,0 +1 @@
+checkpoint-*/
config.json ADDED
@@ -0,0 +1,51 @@
+{
+  "_name_or_path": "annafavaro/BIO_GPT_NER_FINETUNED_NEW_2",
+  "activation_dropout": 0.0,
+  "activation_function": "gelu_new",
+  "architectures": [
+    "GPT2ForTokenClassification"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "attn_pdrop": 0.1,
+  "bos_token_id": 0,
+  "embd_pdrop": 0.1,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "id2label": {
+    "0": "O",
+    "1": "I-ADR",
+    "2": "B-ADR"
+  },
+  "initializer_range": 0.02,
+  "intermediate_size": 4096,
+  "label2id": {
+    "B-ADR": 2,
+    "I-ADR": 1,
+    "O": 0
+  },
+  "layer_norm_eps": 1e-12,
+  "layer_norm_epsilon": 1e-05,
+  "layerdrop": 0.0,
+  "model_type": "gpt2",
+  "n_embd": 1024,
+  "n_head": 16,
+  "n_inner": null,
+  "n_layer": 24,
+  "n_positions": 1024,
+  "pad_token_id": 1,
+  "reorder_and_upcast_attn": false,
+  "resid_pdrop": 0.1,
+  "scale_attn_by_inverse_layer_idx": false,
+  "scale_attn_weights": true,
+  "scale_embedding": true,
+  "summary_activation": null,
+  "summary_first_dropout": 0.1,
+  "summary_proj_to_labels": true,
+  "summary_type": "cls_index",
+  "summary_use_proj": true,
+  "torch_dtype": "float32",
+  "transformers_version": "4.28.0",
+  "use_cache": true,
+  "vocab_size": 42384
+}
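The config above describes a GPT-2-style token-classification head (24 layers, 1024-dim embeddings, a 42384-token BioGPT vocabulary) with a three-way ADR tag set (O, I-ADR, B-ADR). As a hedged sketch, not part of this commit, the checkpoint can be loaded with Transformers' Auto classes; the local path "." is a placeholder for a clone of this repository.

```python
# Sketch only: load the committed config.json and pytorch_model.bin with Transformers.
from transformers import AutoConfig, AutoModelForTokenClassification

config = AutoConfig.from_pretrained(".")                      # parses config.json
model = AutoModelForTokenClassification.from_pretrained(".")  # GPT2ForTokenClassification weights

# The ADR tag set declared in config.json is exposed on the loaded config
# (Transformers converts the JSON string keys of id2label to ints).
print(config.id2label)  # expected: {0: 'O', 1: 'I-ADR', 2: 'B-ADR'}
```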
merges.txt ADDED
The diff for this file is too large to render.
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d17fbd0433d66f8ccbc63b919b1c6b2f71dc296006f39446c5625a5ef1d892c8
+size 1412334373
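The three lines above are a Git LFS pointer, not the ~1.4 GB weight file itself; `git lfs pull` fetches the actual blob, whose SHA-256 and byte size should match the recorded oid and size. A minimal verification sketch, assuming the weights have been pulled into the working tree:

```python
# Verify a pulled LFS object against the pointer recorded in this commit.
import hashlib
import os

EXPECTED_OID = "d17fbd0433d66f8ccbc63b919b1c6b2f71dc296006f39446c5625a5ef1d892c8"
EXPECTED_SIZE = 1412334373  # bytes, as stated in the pointer

path = "pytorch_model.bin"
assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch: pointer not resolved?"

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        digest.update(chunk)
assert digest.hexdigest() == EXPECTED_OID, "checksum mismatch"
print("pytorch_model.bin matches the LFS pointer")
```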
runs/Apr13_21-17-55_d2f6cadd9fb9/1681420781.403019/events.out.tfevents.1681420781.d2f6cadd9fb9.170.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3f704e6bb0041746912f228abc9f2a2022b525b02d981e47aacae85f1bf91d6f
+size 5936
runs/Apr13_21-17-55_d2f6cadd9fb9/events.out.tfevents.1681420781.d2f6cadd9fb9.170.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1e98e160f8d29f717529af3fe00f76914c197a1c97747554cbf9a1c0008fa091
+size 5395
runs/Apr13_22-05-39_d2f6cadd9fb9/1681423547.570079/events.out.tfevents.1681423547.d2f6cadd9fb9.170.3 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:909f61de57aa98bf3dbd70052cd780852c33dd6198ea16c34d18ba642b562a49
+size 5936
runs/Apr13_22-05-39_d2f6cadd9fb9/events.out.tfevents.1681423547.d2f6cadd9fb9.170.2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:192af310a01f47b687ec79ad51913c3310749cd23c07b8fb308dc11579ccd396
+size 5395
runs/Apr13_22-38-44_d2f6cadd9fb9/1681425548.189409/events.out.tfevents.1681425548.d2f6cadd9fb9.170.5 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f727a91a1dd1b842c7b82456a87d903d3c361c8cff28d045a89400e41754055f
+size 5936
runs/Apr13_22-38-44_d2f6cadd9fb9/events.out.tfevents.1681425548.d2f6cadd9fb9.170.4 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3b381e58bd13b83c343030044ece930b983c5797aab97c89937f71fc1f68adec
+size 4417
runs/Apr13_23-08-34_d2f6cadd9fb9/1681427329.0418153/events.out.tfevents.1681427329.d2f6cadd9fb9.32845.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fb7ca42e82b693a88546a1b10cd7aa0cca74b6598557960049926bfec0b42707
+size 5936
runs/Apr13_23-08-34_d2f6cadd9fb9/1681427365.1016428/events.out.tfevents.1681427365.d2f6cadd9fb9.32845.3 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:315bfa0d07561ceefde766537ce32bd627c00a2da5dc868bfa4dbe2c02f54b6d
+size 5936
runs/Apr13_23-08-34_d2f6cadd9fb9/1681427395.4606037/events.out.tfevents.1681427395.d2f6cadd9fb9.32845.5 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5d7f10b522f4e5e4393bd4b7df743c483bc58adfb415c9d8c4521b5f6f0ba5a7
+size 5936
runs/Apr13_23-08-34_d2f6cadd9fb9/events.out.tfevents.1681427329.d2f6cadd9fb9.32845.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:34af317f34ddfa7dda57c4a5378b180bcb01f40730edf0b57681c8bfef4d13c1
+size 4610
runs/Apr13_23-08-34_d2f6cadd9fb9/events.out.tfevents.1681427365.d2f6cadd9fb9.32845.2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:48faf9dbf9a027cb40f0c3864d0bc8ae21f490aab52f5e4dd358cc674e71e75e
+size 4184
runs/Apr13_23-08-34_d2f6cadd9fb9/events.out.tfevents.1681427395.d2f6cadd9fb9.32845.4 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9d3987560b9cd3c05a15fef6389e378f927ac82ccf6754eab4c891f98785dd36
+size 4184
runs/Apr13_23-26-15_d2f6cadd9fb9/1681428386.1686995/events.out.tfevents.1681428386.d2f6cadd9fb9.37676.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c1d60922ab007873e2c7ef872261b03263e7f37d4c340e00f5dab162f3334002
+size 5936
runs/Apr13_23-26-15_d2f6cadd9fb9/events.out.tfevents.1681428386.d2f6cadd9fb9.37676.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bf3b9de798ad5d5da27c28a88593153cc0687879ab78c4e2fdf5fc0a0af387ee
+size 5867
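The files under runs/ are TensorBoard event logs written by the Trainer during this epoch; they can be browsed with `tensorboard --logdir runs` or read programmatically. A small sketch using TensorBoard's EventAccumulator (the chosen run directory and the "train/loss" tag are illustrative; check the tags actually present):

```python
# Sketch: read scalars from one of the committed TensorBoard runs.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Apr13_23-26-15_d2f6cadd9fb9")
acc.Reload()                              # parse the events.out.tfevents.* files
print(acc.Tags()["scalars"])              # scalar tags logged by the Trainer
for event in acc.Scalars("train/loss"):   # tag name is an assumption; pick one from Tags()
    print(event.step, event.value)
```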
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+{
+  "bos_token": "<s>",
+  "eos_token": "</s>",
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "unk_token": "<unk>"
+}
tokenizer_config.json ADDED
@@ -0,0 +1,10 @@
+{
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": true,
+  "eos_token": "</s>",
+  "model_max_length": 1024,
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "tokenizer_class": "BioGptTokenizer",
+  "unk_token": "<unk>"
+}
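tokenizer_config.json declares BioGptTokenizer, so the committed vocab.json/merges.txt pair and the special-token map above load together with it (BioGptTokenizer additionally needs the sacremoses package). A hedged end-to-end sketch; the input sentence and the local path "." are illustrative only:

```python
# Sketch: run the fine-tuned checkpoint as a token-classification (NER) pipeline.
from transformers import AutoModelForTokenClassification, AutoTokenizer, pipeline

tokenizer = AutoTokenizer.from_pretrained(".")   # BioGptTokenizer via vocab.json + merges.txt
model = AutoModelForTokenClassification.from_pretrained(".")

ner = pipeline(
    "token-classification",
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="simple",  # merge B-ADR / I-ADR pieces into spans
)
# Example text, not taken from the training data.
print(ner("The patient reported severe dizziness after starting the new medication."))
```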
training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4dbf0b924436bbd15ac0ea67a49a05a2340a5d4b3996c2edc29c09a5fef791c4
+size 3643
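training_args.bin is the Trainer's TrainingArguments object serialized with torch.save. Loading it (only from a trusted source, since it is a full pickle) shows the hyperparameters used for this run; a brief sketch:

```python
# Sketch: inspect the hyperparameters stored in training_args.bin.
import torch

args = torch.load("training_args.bin", weights_only=False)  # full pickle, so only load trusted files
print(type(args).__name__)  # expected: TrainingArguments
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)
```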
vocab.json ADDED
The diff for this file is too large to render.