bathmaraj committed
Commit f10910c
1 Parent(s): 970d63b

End of training

README.md CHANGED
@@ -15,14 +15,14 @@ should probably proofread and complete it, then remove this comment. -->

This model is a fine-tuned version of [SCUT-DLVCLab/lilt-roberta-en-base](https://huggingface.co/SCUT-DLVCLab/lilt-roberta-en-base) on an unknown dataset.
It achieves the following results on the evaluation set:
- - Loss: 1.8277
- - Answer: {'precision': 0.8632183908045977, 'recall': 0.9192166462668299, 'f1': 0.890337877889745, 'number': 817}
- - Header: {'precision': 0.6407766990291263, 'recall': 0.5546218487394958, 'f1': 0.5945945945945947, 'number': 119}
- - Question: {'precision': 0.9023941068139963, 'recall': 0.9099350046425255, 'f1': 0.9061488673139159, 'number': 1077}
- - Overall Precision: 0.8728
- - Overall Recall: 0.8927
- - Overall F1: 0.8826
- - Overall Accuracy: 0.7970
+ - Loss: 1.7177
+ - Answer: {'precision': 0.8729411764705882, 'recall': 0.9082007343941249, 'f1': 0.8902219556088783, 'number': 817}
+ - Header: {'precision': 0.6421052631578947, 'recall': 0.5126050420168067, 'f1': 0.5700934579439252, 'number': 119}
+ - Question: {'precision': 0.906021897810219, 'recall': 0.9220055710306406, 'f1': 0.9139438564196963, 'number': 1077}
+ - Overall Precision: 0.8800
+ - Overall Recall: 0.8922
+ - Overall F1: 0.8860
+ - Overall Accuracy: 0.8064

## Model description

@@ -52,20 +52,20 @@ The following hyperparameters were used during training:

### Training results

- | Training Loss | Epoch | Step | Validation Loss | Answer | Header | Question | Overall Precision | Overall Recall | Overall F1 | Overall Accuracy |
- |:-------------:|:--------:|:----:|:---------------:|:--------------------------------------------------------------------------------------------------------:|:--------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------:|:-----------------:|:--------------:|:----------:|:----------------:|
- | 0.0423 | 10.5263 | 200 | 1.4194 | {'precision': 0.8633093525179856, 'recall': 0.8812729498164015, 'f1': 0.872198667474258, 'number': 817} | {'precision': 0.49624060150375937, 'recall': 0.5546218487394958, 'f1': 0.523809523809524, 'number': 119} | {'precision': 0.8954372623574145, 'recall': 0.8746518105849582, 'f1': 0.8849224988257399, 'number': 1077} | 0.8559 | 0.8584 | 0.8571 | 0.8065 |
- | 0.0157 | 21.0526 | 400 | 1.4352 | {'precision': 0.8376259798432251, 'recall': 0.9155446756425949, 'f1': 0.8748538011695907, 'number': 817} | {'precision': 0.631578947368421, 'recall': 0.5042016806722689, 'f1': 0.5607476635514018, 'number': 119} | {'precision': 0.9015009380863039, 'recall': 0.8922934076137419, 'f1': 0.8968735417638825, 'number': 1077} | 0.8612 | 0.8788 | 0.8699 | 0.8038 |
- | 0.0068 | 31.5789 | 600 | 1.5303 | {'precision': 0.8661137440758294, 'recall': 0.8947368421052632, 'f1': 0.8801926550270921, 'number': 817} | {'precision': 0.5655737704918032, 'recall': 0.5798319327731093, 'f1': 0.5726141078838175, 'number': 119} | {'precision': 0.8872593950504125, 'recall': 0.8987929433611885, 'f1': 0.8929889298892989, 'number': 1077} | 0.8595 | 0.8783 | 0.8688 | 0.7898 |
- | 0.0026 | 42.1053 | 800 | 1.4908 | {'precision': 0.8418141592920354, 'recall': 0.9314565483476133, 'f1': 0.8843695525857059, 'number': 817} | {'precision': 0.6595744680851063, 'recall': 0.5210084033613446, 'f1': 0.5821596244131456, 'number': 119} | {'precision': 0.8848594741613781, 'recall': 0.9062209842154132, 'f1': 0.8954128440366973, 'number': 1077} | 0.8563 | 0.8937 | 0.8746 | 0.8026 |
- | 0.0016 | 52.6316 | 1000 | 1.7469 | {'precision': 0.8781664656212304, 'recall': 0.8910648714810282, 'f1': 0.8845686512758201, 'number': 817} | {'precision': 0.5981308411214953, 'recall': 0.5378151260504201, 'f1': 0.5663716814159291, 'number': 119} | {'precision': 0.8772401433691757, 'recall': 0.9090064995357474, 'f1': 0.8928408572731419, 'number': 1077} | 0.8631 | 0.8798 | 0.8713 | 0.7846 |
- | 0.0047 | 63.1579 | 1200 | 1.9284 | {'precision': 0.8626309662398137, 'recall': 0.9069767441860465, 'f1': 0.8842482100238663, 'number': 817} | {'precision': 0.6777777777777778, 'recall': 0.5126050420168067, 'f1': 0.5837320574162679, 'number': 119} | {'precision': 0.8809310653536258, 'recall': 0.9136490250696379, 'f1': 0.8969917958067456, 'number': 1077} | 0.8645 | 0.8872 | 0.8757 | 0.7934 |
- | 0.0009 | 73.6842 | 1400 | 2.0302 | {'precision': 0.8406593406593407, 'recall': 0.9363525091799265, 'f1': 0.8859293572669368, 'number': 817} | {'precision': 0.4928571428571429, 'recall': 0.5798319327731093, 'f1': 0.5328185328185329, 'number': 119} | {'precision': 0.9024390243902439, 'recall': 0.8588672237697307, 'f1': 0.8801141769743102, 'number': 1077} | 0.8477 | 0.8738 | 0.8606 | 0.7793 |
- | 0.0006 | 84.2105 | 1600 | 1.9236 | {'precision': 0.861271676300578, 'recall': 0.9118727050183598, 'f1': 0.8858501783590963, 'number': 817} | {'precision': 0.6126126126126126, 'recall': 0.5714285714285714, 'f1': 0.591304347826087, 'number': 119} | {'precision': 0.9030470914127424, 'recall': 0.9080779944289693, 'f1': 0.9055555555555556, 'number': 1077} | 0.8698 | 0.8897 | 0.8797 | 0.7870 |
- | 0.0003 | 94.7368 | 1800 | 1.8036 | {'precision': 0.8496583143507973, 'recall': 0.9130966952264382, 'f1': 0.88023598820059, 'number': 817} | {'precision': 0.6238532110091743, 'recall': 0.5714285714285714, 'f1': 0.5964912280701754, 'number': 119} | {'precision': 0.8934802571166207, 'recall': 0.903435468895079, 'f1': 0.8984302862419206, 'number': 1077} | 0.8608 | 0.8877 | 0.8741 | 0.7940 |
- | 0.0003 | 105.2632 | 2000 | 1.8317 | {'precision': 0.8684516880093132, 'recall': 0.9130966952264382, 'f1': 0.8902147971360381, 'number': 817} | {'precision': 0.6442307692307693, 'recall': 0.5630252100840336, 'f1': 0.600896860986547, 'number': 119} | {'precision': 0.9025069637883009, 'recall': 0.9025069637883009, 'f1': 0.9025069637883009, 'number': 1077} | 0.875 | 0.8867 | 0.8808 | 0.7946 |
- | 0.0001 | 115.7895 | 2200 | 1.8277 | {'precision': 0.8632183908045977, 'recall': 0.9192166462668299, 'f1': 0.890337877889745, 'number': 817} | {'precision': 0.6407766990291263, 'recall': 0.5546218487394958, 'f1': 0.5945945945945947, 'number': 119} | {'precision': 0.9023941068139963, 'recall': 0.9099350046425255, 'f1': 0.9061488673139159, 'number': 1077} | 0.8728 | 0.8927 | 0.8826 | 0.7970 |
- | 0.0001 | 126.3158 | 2400 | 1.8411 | {'precision': 0.8540723981900452, 'recall': 0.9241126070991432, 'f1': 0.8877131099353323, 'number': 817} | {'precision': 0.6285714285714286, 'recall': 0.5546218487394958, 'f1': 0.5892857142857143, 'number': 119} | {'precision': 0.9072356215213359, 'recall': 0.9080779944289693, 'f1': 0.9076566125290023, 'number': 1077} | 0.8703 | 0.8937 | 0.8819 | 0.7935 |
+ | Training Loss | Epoch | Step | Validation Loss | Answer | Header | Question | Overall Precision | Overall Recall | Overall F1 | Overall Accuracy |
+ |:-------------:|:--------:|:----:|:---------------:|:--------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------:|:-----------------:|:--------------:|:----------:|:----------------:|
+ | 0.4044 | 10.5263 | 200 | 1.1266 | {'precision': 0.8246606334841629, 'recall': 0.8922888616891065, 'f1': 0.8571428571428571, 'number': 817} | {'precision': 0.3495575221238938, 'recall': 0.6638655462184874, 'f1': 0.4579710144927537, 'number': 119} | {'precision': 0.8747609942638623, 'recall': 0.8495821727019499, 'f1': 0.8619877531794631, 'number': 1077} | 0.7992 | 0.8559 | 0.8266 | 0.7671 |
+ | 0.0518 | 21.0526 | 400 | 1.2142 | {'precision': 0.8138006571741512, 'recall': 0.9094247246022031, 'f1': 0.8589595375722543, 'number': 817} | {'precision': 0.49612403100775193, 'recall': 0.5378151260504201, 'f1': 0.5161290322580645, 'number': 119} | {'precision': 0.8705234159779615, 'recall': 0.8802228412256268, 'f1': 0.8753462603878117, 'number': 1077} | 0.8236 | 0.8718 | 0.8470 | 0.8011 |
+ | 0.0137 | 31.5789 | 600 | 1.5789 | {'precision': 0.8478513356562137, 'recall': 0.8935128518971848, 'f1': 0.8700834326579261, 'number': 817} | {'precision': 0.5588235294117647, 'recall': 0.4789915966386555, 'f1': 0.5158371040723981, 'number': 119} | {'precision': 0.8839528558476881, 'recall': 0.9052924791086351, 'f1': 0.8944954128440367, 'number': 1077} | 0.8529 | 0.8753 | 0.8639 | 0.7932 |
+ | 0.008 | 42.1053 | 800 | 1.5466 | {'precision': 0.8540478905359179, 'recall': 0.9167686658506732, 'f1': 0.8842975206611571, 'number': 817} | {'precision': 0.528169014084507, 'recall': 0.6302521008403361, 'f1': 0.5747126436781609, 'number': 119} | {'precision': 0.899074074074074, 'recall': 0.9015784586815228, 'f1': 0.9003245248029671, 'number': 1077} | 0.8552 | 0.8917 | 0.8731 | 0.7876 |
+ | 0.0047 | 52.6316 | 1000 | 1.5218 | {'precision': 0.8712029161603888, 'recall': 0.8776009791921665, 'f1': 0.874390243902439, 'number': 817} | {'precision': 0.5882352941176471, 'recall': 0.5042016806722689, 'f1': 0.5429864253393665, 'number': 119} | {'precision': 0.9080459770114943, 'recall': 0.8802228412256268, 'f1': 0.8939179632248938, 'number': 1077} | 0.8761 | 0.8569 | 0.8664 | 0.8023 |
+ | 0.0026 | 63.1579 | 1200 | 1.6588 | {'precision': 0.8784596871239471, 'recall': 0.8935128518971848, 'f1': 0.8859223300970873, 'number': 817} | {'precision': 0.532258064516129, 'recall': 0.5546218487394958, 'f1': 0.54320987654321, 'number': 119} | {'precision': 0.8739946380697051, 'recall': 0.9080779944289693, 'f1': 0.8907103825136613, 'number': 1077} | 0.8554 | 0.8813 | 0.8681 | 0.7971 |
+ | 0.0013 | 73.6842 | 1400 | 1.6428 | {'precision': 0.903822441430333, 'recall': 0.8971848225214198, 'f1': 0.9004914004914006, 'number': 817} | {'precision': 0.6166666666666667, 'recall': 0.6218487394957983, 'f1': 0.6192468619246863, 'number': 119} | {'precision': 0.9017132551848512, 'recall': 0.9285051067780873, 'f1': 0.9149130832570905, 'number': 1077} | 0.8858 | 0.8977 | 0.8917 | 0.8127 |
+ | 0.0009 | 84.2105 | 1600 | 1.6516 | {'precision': 0.8909090909090909, 'recall': 0.8996328029375765, 'f1': 0.8952496954933008, 'number': 817} | {'precision': 0.6132075471698113, 'recall': 0.5462184873949579, 'f1': 0.5777777777777778, 'number': 119} | {'precision': 0.9070837166513339, 'recall': 0.9155060352831941, 'f1': 0.911275415896488, 'number': 1077} | 0.8850 | 0.8872 | 0.8861 | 0.8116 |
+ | 0.0007 | 94.7368 | 1800 | 1.7017 | {'precision': 0.8470319634703196, 'recall': 0.9082007343941249, 'f1': 0.8765505020673362, 'number': 817} | {'precision': 0.6521739130434783, 'recall': 0.5042016806722689, 'f1': 0.5687203791469194, 'number': 119} | {'precision': 0.8938547486033519, 'recall': 0.8913649025069638, 'f1': 0.8926080892608089, 'number': 1077} | 0.8629 | 0.8753 | 0.8691 | 0.8004 |
+ | 0.0004 | 105.2632 | 2000 | 1.7304 | {'precision': 0.8624708624708625, 'recall': 0.9057527539779682, 'f1': 0.8835820895522388, 'number': 817} | {'precision': 0.6095238095238096, 'recall': 0.5378151260504201, 'f1': 0.5714285714285715, 'number': 119} | {'precision': 0.906046511627907, 'recall': 0.904363974001857, 'f1': 0.9052044609665427, 'number': 1077} | 0.8724 | 0.8833 | 0.8778 | 0.8019 |
+ | 0.0003 | 115.7895 | 2200 | 1.7230 | {'precision': 0.8723404255319149, 'recall': 0.9033047735618115, 'f1': 0.8875526157546603, 'number': 817} | {'precision': 0.6702127659574468, 'recall': 0.5294117647058824, 'f1': 0.5915492957746479, 'number': 119} | {'precision': 0.8992740471869328, 'recall': 0.9201485608170845, 'f1': 0.9095915557595228, 'number': 1077} | 0.8776 | 0.8902 | 0.8838 | 0.8049 |
+ | 0.0002 | 126.3158 | 2400 | 1.7177 | {'precision': 0.8729411764705882, 'recall': 0.9082007343941249, 'f1': 0.8902219556088783, 'number': 817} | {'precision': 0.6421052631578947, 'recall': 0.5126050420168067, 'f1': 0.5700934579439252, 'number': 119} | {'precision': 0.906021897810219, 'recall': 0.9220055710306406, 'f1': 0.9139438564196963, 'number': 1077} | 0.8800 | 0.8922 | 0.8860 | 0.8064 |


### Framework versions
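The README diff above reports per-field token-classification metrics (answer, header, question) but never shows how the fine-tuned checkpoint would actually be called. Below is a minimal inference sketch, not the author's code: the repo id, words, and boxes are placeholders, and it assumes OCR words with boxes already normalized to the 0-1000 range LiLT expects.

```python
# Hedged inference sketch for a LiLT token-classification checkpoint like this one.
# The repo id, words and boxes are placeholders, not values taken from this commit.
import torch
from transformers import AutoTokenizer, AutoModelForTokenClassification

repo_id = "your-username/lilt-roberta-en-base-finetuned"  # placeholder repo id

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForTokenClassification.from_pretrained(repo_id)

# Toy OCR output: one 0-1000-normalized box per word.
words = ["Invoice", "Number:", "12345"]
boxes = [[70, 50, 160, 70], [170, 50, 250, 70], [260, 50, 330, 70]]

# The LayoutLM-style tokenizer shipped with LiLT takes pre-split words plus boxes
# and returns input_ids, attention_mask and bbox tensors.
encoding = tokenizer(words, boxes=boxes, truncation=True, return_tensors="pt")

with torch.no_grad():
    logits = model(**encoding).logits

predicted_ids = logits.argmax(-1).squeeze().tolist()
tokens = tokenizer.convert_ids_to_tokens(encoding["input_ids"].squeeze().tolist())
for token, label_id in zip(tokens, predicted_ids):
    print(token, model.config.id2label[label_id])
```

The printed tags come straight from the `id2label` mapping stored in the checkpoint's config, which is where the answer/header/question label scheme behind the metrics above lives.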
logs/events.out.tfevents.1715844384.ae89546c7677.2272.0 CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
- oid sha256:3bdc977c9ad8486348227e3b20a417b538df16af642c398031831f1f05900d99
- size 13646
+ oid sha256:14fc7cb56dde2ad593b97bc51bb550f21042a95dad5c44d42a318bf91f2281aa
+ size 14000
model.safetensors CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
- oid sha256:6b1d79c0e3a0d9736489ba0e8ca55b655e242b170fcf000d59981ab29e84b3ee
+ oid sha256:e340af3d9418d4b71484fa45e5770c1a8cbc73cbd1a537312725a87259e07bf9
size 520727564
preprocessor_config.json CHANGED
@@ -25,7 +25,7 @@
    0.5,
    0.5
  ],
- "image_processor_type": "LayoutLMv3ImageProcessor",
+ "image_processor_type": "LayoutLMv3FeatureExtractor",
  "image_std": [
    0.5,
    0.5,
tokenizer.json CHANGED
@@ -1,21 +1,7 @@
{
  "version": "1.0",
- "truncation": {
-   "direction": "Right",
-   "max_length": 512,
-   "strategy": "LongestFirst",
-   "stride": 0
- },
- "padding": {
-   "strategy": {
-     "Fixed": 512
-   },
-   "direction": "Right",
-   "pad_to_multiple_of": null,
-   "pad_id": 1,
-   "pad_type_id": 0,
-   "pad_token": "<pad>"
- },
+ "truncation": null,
+ "padding": null,
  "added_tokens": [
    {
      "id": 0,
tokenizer_config.json CHANGED
@@ -54,10 +54,8 @@
  "eos_token": "</s>",
  "errors": "replace",
  "mask_token": "<mask>",
- "max_length": 512,
  "model_max_length": 512,
  "only_label_first_subword": true,
- "pad_to_multiple_of": null,
  "pad_token": "<pad>",
  "pad_token_box": [
    0,
@@ -66,8 +64,6 @@
    0
  ],
  "pad_token_label": -100,
- "pad_token_type_id": 0,
- "padding_side": "right",
  "processor_class": "LayoutLMv3Processor",
  "sep_token": "</s>",
  "sep_token_box": [
@@ -76,10 +72,7 @@
    0,
    0
  ],
- "stride": 0,
  "tokenizer_class": "LayoutLMv3Tokenizer",
  "trim_offsets": true,
- "truncation_side": "right",
- "truncation_strategy": "longest_first",
  "unk_token": "<unk>"
}
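The tokenizer.json and tokenizer_config.json diffs above drop the serialized truncation and padding settings (right-side truncation and fixed 512-token padding) without touching the vocabulary, which typically just means the re-saved tokenizer carries no default strategy and callers request it explicitly at encode time. A minimal sketch of doing that, assuming a hypothetical repo id and toy inputs:

```python
# Hedged sketch: the re-saved tokenizer no longer pins a truncation/padding strategy,
# so the previous fixed 512-token behaviour is requested per call instead.
# The repo id, words and boxes are placeholders, not values from this repository.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("your-username/lilt-roberta-en-base-finetuned")

words = ["Total", "Amount", "Due:"]
boxes = [[100, 500, 180, 520], [190, 500, 270, 520], [280, 500, 330, 520]]

encoding = tokenizer(
    words,
    boxes=boxes,
    truncation=True,        # was "strategy": "LongestFirst", "max_length": 512
    padding="max_length",   # was "padding": {"Fixed": 512}
    max_length=512,
    return_tensors="pt",
)
print(encoding["input_ids"].shape)  # torch.Size([1, 512])
```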