pierreguillou committed
Commit 541b089
1 Parent(s): c3d2988

Training in progress, step 400

Files changed (36)
  1. .gitattributes +0 -1
  2. {checkpoint-100 → checkpoint-200}/config.json +0 -0
  3. {checkpoint-100 → checkpoint-200}/optimizer.pt +1 -1
  4. {checkpoint-100 → checkpoint-200}/pytorch_model.bin +1 -1
  5. {checkpoint-100 → checkpoint-200}/rng_state.pth +1 -1
  6. {checkpoint-100 → checkpoint-200}/scaler.pt +1 -1
  7. {checkpoint-100 → checkpoint-200}/scheduler.pt +1 -1
  8. {checkpoint-100 → checkpoint-200}/special_tokens_map.json +0 -0
  9. {checkpoint-100 → checkpoint-200}/tokenizer.json +0 -0
  10. {checkpoint-100 → checkpoint-200}/tokenizer_config.json +0 -0
  11. {checkpoint-100 → checkpoint-200}/trainer_state.json +17 -5
  12. {checkpoint-100 → checkpoint-200}/training_args.bin +0 -0
  13. checkpoint-300/config.json +56 -0
  14. checkpoint-300/optimizer.pt +3 -0
  15. checkpoint-300/pytorch_model.bin +3 -0
  16. checkpoint-300/rng_state.pth +3 -0
  17. checkpoint-300/scaler.pt +3 -0
  18. checkpoint-300/scheduler.pt +3 -0
  19. checkpoint-300/special_tokens_map.json +15 -0
  20. checkpoint-300/tokenizer.json +3 -0
  21. checkpoint-300/tokenizer_config.json +20 -0
  22. checkpoint-300/trainer_state.json +52 -0
  23. checkpoint-300/training_args.bin +3 -0
  24. checkpoint-400/config.json +56 -0
  25. checkpoint-400/optimizer.pt +3 -0
  26. checkpoint-400/pytorch_model.bin +3 -0
  27. checkpoint-400/rng_state.pth +3 -0
  28. checkpoint-400/scaler.pt +3 -0
  29. checkpoint-400/scheduler.pt +3 -0
  30. checkpoint-400/special_tokens_map.json +15 -0
  31. checkpoint-400/tokenizer.json +3 -0
  32. checkpoint-400/tokenizer_config.json +20 -0
  33. checkpoint-400/trainer_state.json +64 -0
  34. checkpoint-400/training_args.bin +3 -0
  35. pytorch_model.bin +1 -1
  36. runs/Feb15_07-53-26_e220f522c880/events.out.tfevents.1676447621.e220f522c880.495.0 +2 -2
.gitattributes CHANGED
@@ -32,5 +32,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
- checkpoint-100/tokenizer.json filter=lfs diff=lfs merge=lfs -text
  tokenizer.json filter=lfs diff=lfs merge=lfs -text
{checkpoint-100 → checkpoint-200}/config.json RENAMED
File without changes
{checkpoint-100 → checkpoint-200}/optimizer.pt RENAMED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:3c3bb7f798b7faa9ffe19d5e1bb8395f1909a3e12768cc1876e800e16f389c7a
+ oid sha256:c7cdf79d3146238ae659f3724cb61a9f3eef1d94bdc25b29615f00c8f24590c2
  size 2265827717
{checkpoint-100 → checkpoint-200}/pytorch_model.bin RENAMED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:8ddccc8aa50bfb082d45e1d9486a1480ff83d9b46e9e73c0609138717d6e917e
+ oid sha256:d22e589804912ea5de135343295f3d1d679cdb01de0862367236991e3a9e8f5b
  size 1134425553
{checkpoint-100 → checkpoint-200}/rng_state.pth RENAMED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:b3f5812fac2dd8ce54f144d17e5819925f261f779dbbc7a7fe69a7fcaf2e80f8
+ oid sha256:19a5fbfb1d8b3fb183af663f8903b6b2d98760688828d8a5abb54552e6de1d6f
  size 14575
{checkpoint-100 → checkpoint-200}/scaler.pt RENAMED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:49275945ecfd0346ebdc51411033b3b40ac9d7fd86faa2315017693f738a1151
+ oid sha256:fc28ad4ba690d88b4a64a84b0eb679f0ae2052dd18f66a37a2ec7425463d2024
  size 557
{checkpoint-100 → checkpoint-200}/scheduler.pt RENAMED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:26589aff2b7c88281509e1e9d0ce9b63736104f150596456f9ee599b4187ae0d
+ oid sha256:f06cf77a584fcbc5eec1e5ddee45214561dae318ae26edcbcc04f742aac3e73b
  size 627
{checkpoint-100 → checkpoint-200}/special_tokens_map.json RENAMED
File without changes
{checkpoint-100 → checkpoint-200}/tokenizer.json RENAMED
File without changes
{checkpoint-100 → checkpoint-200}/tokenizer_config.json RENAMED
File without changes
{checkpoint-100 → checkpoint-200}/trainer_state.json RENAMED
@@ -1,8 +1,8 @@
  {
-   "best_metric": 0.6585185702735884,
-   "best_model_checkpoint": "DocLayNet/lilt-xlm-roberta-base-finetuned-DocLayNet-base_paragraphs_ml512-v5/checkpoint-100",
-   "epoch": 0.053304904051172705,
-   "global_step": 100,
+   "best_metric": 0.7551279108067913,
+   "best_model_checkpoint": "DocLayNet/lilt-xlm-roberta-base-finetuned-DocLayNet-base_paragraphs_ml512-v5/checkpoint-200",
+   "epoch": 0.10660980810234541,
+   "global_step": 200,
    "is_hyper_param_search": false,
    "is_local_process_zero": true,
    "is_world_process_zero": true,
@@ -18,11 +18,23 @@
      "eval_samples_per_second": 33.717,
      "eval_steps_per_second": 2.119,
      "step": 100
+     },
+     {
+       "epoch": 0.11,
+       "eval_accuracy": 0.7551279108067913,
+       "eval_f1": 0.7551279108067913,
+       "eval_loss": 0.7886354327201843,
+       "eval_precision": 0.7551279108067913,
+       "eval_recall": 0.7551279108067913,
+       "eval_runtime": 48.4163,
+       "eval_samples_per_second": 33.191,
+       "eval_steps_per_second": 2.086,
+       "step": 200
      }
    ],
    "max_steps": 1876,
    "num_train_epochs": 1,
-   "total_flos": 222537791078400.0,
+   "total_flos": 445075582156800.0,
    "trial_name": null,
    "trial_params": null
  }
{checkpoint-100 → checkpoint-200}/training_args.bin RENAMED
File without changes
checkpoint-300/config.json ADDED
@@ -0,0 +1,56 @@
+ {
+   "_name_or_path": "nielsr/lilt-xlm-roberta-base",
+   "architectures": [
+     "LiltForTokenClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "bos_token_id": 0,
+   "channel_shrink_ratio": 4,
+   "classifier_dropout": null,
+   "eos_token_id": 2,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 768,
+   "id2label": {
+     "0": "Caption",
+     "1": "Footnote",
+     "2": "Formula",
+     "3": "List-item",
+     "4": "Page-footer",
+     "5": "Page-header",
+     "6": "Picture",
+     "7": "Section-header",
+     "8": "Table",
+     "9": "Text",
+     "10": "Title"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "label2id": {
+     "Caption": 0,
+     "Footnote": 1,
+     "Formula": 2,
+     "List-item": 3,
+     "Page-footer": 4,
+     "Page-header": 5,
+     "Picture": 6,
+     "Section-header": 7,
+     "Table": 8,
+     "Text": 9,
+     "Title": 10
+   },
+   "layer_norm_eps": 1e-05,
+   "max_2d_position_embeddings": 1024,
+   "max_position_embeddings": 514,
+   "model_type": "lilt",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "output_past": true,
+   "pad_token_id": 1,
+   "position_embedding_type": "absolute",
+   "torch_dtype": "float32",
+   "transformers_version": "4.26.1",
+   "type_vocab_size": 1,
+   "use_cache": true,
+   "vocab_size": 250002
+ }
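
This config describes a LiltForTokenClassification head on top of nielsr/lilt-xlm-roberta-base with the eleven DocLayNet layout labels in id2label/label2id. A minimal sketch of loading one of these checkpoints with transformers; the local directory name below is illustrative and not part of this commit:

# Sketch: load a fine-tuning checkpoint for inference (assumes a checkpoint
# directory such as "checkpoint-300" has been downloaded locally).
from transformers import AutoTokenizer, AutoModelForTokenClassification

checkpoint_dir = "checkpoint-300"  # illustrative local path

tokenizer = AutoTokenizer.from_pretrained(checkpoint_dir)
model = AutoModelForTokenClassification.from_pretrained(checkpoint_dir)

# The label set comes straight from config.json above.
print(model.config.id2label)  # {0: 'Caption', 1: 'Footnote', ..., 10: 'Title'}

Note that LiLT also expects bounding-box (bbox) inputs alongside the token ids at inference time, so a real pipeline would pass layout coordinates as well.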
checkpoint-300/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5b57c47892e2c979ead028ed133a8e6310573791bcb0ce4b62a32dc49850683e
+ size 2265828101
checkpoint-300/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:880a92376813495eb9c614b219a5f3739441875a9dd423f5a5a4c0f194747462
+ size 1134425553
checkpoint-300/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2edaa8158eaf8be7cc7f6c6471898dae049ef076acac0f3f1cc542415dc6dfca
+ size 14575
checkpoint-300/scaler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:35d89833ad47769c3fd687316799e7acd03e729c4180e72e9a36d6ac51bf9656
+ size 557
checkpoint-300/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:40904ad5451576d85b4d1b180d481a37f60db63c24eb116a2d70a90d013a25fd
+ size 627
checkpoint-300/special_tokens_map.json ADDED
@@ -0,0 +1,15 @@
+ {
+   "bos_token": "<s>",
+   "cls_token": "<s>",
+   "eos_token": "</s>",
+   "mask_token": {
+     "content": "<mask>",
+     "lstrip": true,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "<pad>",
+   "sep_token": "</s>",
+   "unk_token": "<unk>"
+ }
checkpoint-300/tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:62c24cdc13d4c9952d63718d6c9fa4c287974249e16b7ade6d5a85e7bbb75626
+ size 17082660
checkpoint-300/tokenizer_config.json ADDED
@@ -0,0 +1,20 @@
+ {
+   "bos_token": "<s>",
+   "cls_token": "<s>",
+   "eos_token": "</s>",
+   "mask_token": {
+     "__type": "AddedToken",
+     "content": "<mask>",
+     "lstrip": true,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "model_max_length": 512,
+   "name_or_path": "nielsr/lilt-xlm-roberta-base",
+   "pad_token": "<pad>",
+   "sep_token": "</s>",
+   "special_tokens_map_file": null,
+   "tokenizer_class": "XLMRobertaTokenizer",
+   "unk_token": "<unk>"
+ }
checkpoint-300/trainer_state.json ADDED
@@ -0,0 +1,52 @@
+ {
+   "best_metric": 0.8248275724395382,
+   "best_model_checkpoint": "DocLayNet/lilt-xlm-roberta-base-finetuned-DocLayNet-base_paragraphs_ml512-v5/checkpoint-300",
+   "epoch": 0.15991471215351813,
+   "global_step": 300,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {
+       "epoch": 0.05,
+       "eval_accuracy": 0.6585185702735884,
+       "eval_f1": 0.6585185702735884,
+       "eval_loss": 0.9875321388244629,
+       "eval_precision": 0.6585185702735884,
+       "eval_recall": 0.6585185702735884,
+       "eval_runtime": 47.6616,
+       "eval_samples_per_second": 33.717,
+       "eval_steps_per_second": 2.119,
+       "step": 100
+     },
+     {
+       "epoch": 0.11,
+       "eval_accuracy": 0.7551279108067913,
+       "eval_f1": 0.7551279108067913,
+       "eval_loss": 0.7886354327201843,
+       "eval_precision": 0.7551279108067913,
+       "eval_recall": 0.7551279108067913,
+       "eval_runtime": 48.4163,
+       "eval_samples_per_second": 33.191,
+       "eval_steps_per_second": 2.086,
+       "step": 200
+     },
+     {
+       "epoch": 0.16,
+       "eval_accuracy": 0.8248275724395381,
+       "eval_f1": 0.8248275724395382,
+       "eval_loss": 0.5894176363945007,
+       "eval_precision": 0.8248275724395381,
+       "eval_recall": 0.8248275724395381,
+       "eval_runtime": 47.5388,
+       "eval_samples_per_second": 33.804,
+       "eval_steps_per_second": 2.125,
+       "step": 300
+     }
+   ],
+   "max_steps": 1876,
+   "num_train_epochs": 1,
+   "total_flos": 667613373235200.0,
+   "trial_name": null,
+   "trial_params": null
+ }
checkpoint-300/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:575d482dfbe4b5795db81627b295d36a31851f66bb801524d15bc53043f733c4
+ size 3707
checkpoint-400/config.json ADDED
@@ -0,0 +1,56 @@
+ {
+   "_name_or_path": "nielsr/lilt-xlm-roberta-base",
+   "architectures": [
+     "LiltForTokenClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "bos_token_id": 0,
+   "channel_shrink_ratio": 4,
+   "classifier_dropout": null,
+   "eos_token_id": 2,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 768,
+   "id2label": {
+     "0": "Caption",
+     "1": "Footnote",
+     "2": "Formula",
+     "3": "List-item",
+     "4": "Page-footer",
+     "5": "Page-header",
+     "6": "Picture",
+     "7": "Section-header",
+     "8": "Table",
+     "9": "Text",
+     "10": "Title"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "label2id": {
+     "Caption": 0,
+     "Footnote": 1,
+     "Formula": 2,
+     "List-item": 3,
+     "Page-footer": 4,
+     "Page-header": 5,
+     "Picture": 6,
+     "Section-header": 7,
+     "Table": 8,
+     "Text": 9,
+     "Title": 10
+   },
+   "layer_norm_eps": 1e-05,
+   "max_2d_position_embeddings": 1024,
+   "max_position_embeddings": 514,
+   "model_type": "lilt",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "output_past": true,
+   "pad_token_id": 1,
+   "position_embedding_type": "absolute",
+   "torch_dtype": "float32",
+   "transformers_version": "4.26.1",
+   "type_vocab_size": 1,
+   "use_cache": true,
+   "vocab_size": 250002
+ }
checkpoint-400/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7bea0ae9c91632ba73324243f693237eedfde07c60552f721ae85502620f6663
+ size 2265828101
checkpoint-400/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4727566eb4ffd9fac82a769c6b877c66e5ed779dfa933929c682a8e7337e5fa1
+ size 1134425553
checkpoint-400/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6084e517c898c5f3679c65138c00f6e8c42eb251e79e5c86866ef9d34d8a6959
+ size 14575
checkpoint-400/scaler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8894add6cf5ff4d4049868d7614fbae0eb98e7ffb617edb8d94b4cd7e15ebd0d
+ size 557
checkpoint-400/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:510282233ecb669cc6a91c195a0954011c19ca6b777845a850e4192cef7447aa
+ size 627
checkpoint-400/special_tokens_map.json ADDED
@@ -0,0 +1,15 @@
+ {
+   "bos_token": "<s>",
+   "cls_token": "<s>",
+   "eos_token": "</s>",
+   "mask_token": {
+     "content": "<mask>",
+     "lstrip": true,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "<pad>",
+   "sep_token": "</s>",
+   "unk_token": "<unk>"
+ }
checkpoint-400/tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:62c24cdc13d4c9952d63718d6c9fa4c287974249e16b7ade6d5a85e7bbb75626
+ size 17082660
checkpoint-400/tokenizer_config.json ADDED
@@ -0,0 +1,20 @@
+ {
+   "bos_token": "<s>",
+   "cls_token": "<s>",
+   "eos_token": "</s>",
+   "mask_token": {
+     "__type": "AddedToken",
+     "content": "<mask>",
+     "lstrip": true,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "model_max_length": 512,
+   "name_or_path": "nielsr/lilt-xlm-roberta-base",
+   "pad_token": "<pad>",
+   "sep_token": "</s>",
+   "special_tokens_map_file": null,
+   "tokenizer_class": "XLMRobertaTokenizer",
+   "unk_token": "<unk>"
+ }
checkpoint-400/trainer_state.json ADDED
@@ -0,0 +1,64 @@
+ {
+   "best_metric": 0.8395799854272509,
+   "best_model_checkpoint": "DocLayNet/lilt-xlm-roberta-base-finetuned-DocLayNet-base_paragraphs_ml512-v5/checkpoint-400",
+   "epoch": 0.21321961620469082,
+   "global_step": 400,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {
+       "epoch": 0.05,
+       "eval_accuracy": 0.6585185702735884,
+       "eval_f1": 0.6585185702735884,
+       "eval_loss": 0.9875321388244629,
+       "eval_precision": 0.6585185702735884,
+       "eval_recall": 0.6585185702735884,
+       "eval_runtime": 47.6616,
+       "eval_samples_per_second": 33.717,
+       "eval_steps_per_second": 2.119,
+       "step": 100
+     },
+     {
+       "epoch": 0.11,
+       "eval_accuracy": 0.7551279108067913,
+       "eval_f1": 0.7551279108067913,
+       "eval_loss": 0.7886354327201843,
+       "eval_precision": 0.7551279108067913,
+       "eval_recall": 0.7551279108067913,
+       "eval_runtime": 48.4163,
+       "eval_samples_per_second": 33.191,
+       "eval_steps_per_second": 2.086,
+       "step": 200
+     },
+     {
+       "epoch": 0.16,
+       "eval_accuracy": 0.8248275724395381,
+       "eval_f1": 0.8248275724395382,
+       "eval_loss": 0.5894176363945007,
+       "eval_precision": 0.8248275724395381,
+       "eval_recall": 0.8248275724395381,
+       "eval_runtime": 47.5388,
+       "eval_samples_per_second": 33.804,
+       "eval_steps_per_second": 2.125,
+       "step": 300
+     },
+     {
+       "epoch": 0.21,
+       "eval_accuracy": 0.839579985427251,
+       "eval_f1": 0.8395799854272509,
+       "eval_loss": 0.479428768157959,
+       "eval_precision": 0.839579985427251,
+       "eval_recall": 0.839579985427251,
+       "eval_runtime": 45.3857,
+       "eval_samples_per_second": 35.408,
+       "eval_steps_per_second": 2.225,
+       "step": 400
+     }
+   ],
+   "max_steps": 1876,
+   "num_train_epochs": 1,
+   "total_flos": 890151164313600.0,
+   "trial_name": null,
+   "trial_params": null
+ }
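
trainer_state.json records the running evaluation history (log_history) along with the best metric and best checkpoint so far. A small sketch, assuming a checkpoint directory from this repo is available locally (the path below is illustrative), of extracting the eval metrics logged above:

# Sketch: summarize the eval history stored in a checkpoint's trainer_state.json.
import json

with open("checkpoint-400/trainer_state.json") as f:
    state = json.load(f)

print("best:", state["best_metric"], "at", state["best_model_checkpoint"])
for entry in state["log_history"]:
    if "eval_f1" in entry:  # keep only evaluation entries
        print(f"step {entry['step']:>4}  f1={entry['eval_f1']:.4f}  loss={entry['eval_loss']:.4f}")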
checkpoint-400/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:575d482dfbe4b5795db81627b295d36a31851f66bb801524d15bc53043f733c4
+ size 3707
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:8ddccc8aa50bfb082d45e1d9486a1480ff83d9b46e9e73c0609138717d6e917e
+ oid sha256:4727566eb4ffd9fac82a769c6b877c66e5ed779dfa933929c682a8e7337e5fa1
  size 1134425553
runs/Feb15_07-53-26_e220f522c880/events.out.tfevents.1676447621.e220f522c880.495.0 CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5505a2df10be119c29733e09f107dae909925262ce13ec35de03e8b1cf862391
- size 5136
+ oid sha256:2c9463d47ba3115fd44ba28de90fc0000cfcf2825b962b5b23499351a1fc9e21
+ size 6552