Haios141 committed
Commit ec6c57b
Parent: a0a0e36

End of training

README.md CHANGED
@@ -16,14 +16,14 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [microsoft/layoutlm-base-uncased](https://huggingface.co/microsoft/layoutlm-base-uncased) on the funsd dataset.
 It achieves the following results on the evaluation set:
- - Loss: 1.3293
- - Answer: {'precision': 0.11451135241855874, 'recall': 0.1433868974042027, 'f1': 0.12733260153677278, 'number': 809}
+ - Loss: 1.3057
+ - Answer: {'precision': 0.09480519480519481, 'recall': 0.09023485784919653, 'f1': 0.09246358454718177, 'number': 809}
  - Header: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119}
- - Question: {'precision': 0.41704374057315236, 'recall': 0.5192488262910798, 'f1': 0.46256796319531585, 'number': 1065}
- - Overall Precision: 0.2860
- - Overall Recall: 0.3357
- - Overall F1: 0.3089
- - Overall Accuracy: 0.5623
+ - Question: {'precision': 0.4032534246575342, 'recall': 0.4422535211267606, 'f1': 0.4218540080609046, 'number': 1065}
+ - Overall Precision: 0.2807
+ - Overall Recall: 0.2730
+ - Overall F1: 0.2768
+ - Overall Accuracy: 0.5691
 
 ## Model description
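The per-field dictionaries in the hunk above have the shape produced by the seqeval metric, and the overall F1 is the harmonic mean of overall precision and recall. A quick, illustrative check of the new numbers (the helper below is not part of the repository):

```python
# Illustrative check: overall F1 is the harmonic mean of overall precision and recall.
def f1(precision: float, recall: float) -> float:
    return 0.0 if precision + recall == 0 else 2 * precision * recall / (precision + recall)

print(round(f1(0.2807, 0.2730), 4))                          # 0.2768 -> reported Overall F1
print(round(f1(0.4032534246575342, 0.4422535211267606), 4))  # 0.4219 -> reported Question f1
```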
 
@@ -52,28 +52,28 @@ The following hyperparameters were used during training:
 
 ### Training results
 
- | Training Loss | Epoch | Step | Validation Loss | Answer | Header | Question | Overall Precision | Overall Recall | Overall F1 | Overall Accuracy |
- |:-------------:|:-----:|:----:|:---------------:|:--------------------------------------------------:|:--------------------------------------------------:|:--------------------------------------------------:|:-----------------:|:--------------:|:----------:|:----------------:|
- | 1.9774 | 1.0 | 10 | 1.9285 | {'precision': 0.018331226295828066, 'recall': 0.03584672435105068, 'f1': 0.024257632789627767, 'number': 809} | {'precision': 0.00787878787878788, 'recall': 0.1092436974789916, 'f1': 0.014697569248162805, 'number': 119} | {'precision': 0.06559356136820925, 'recall': 0.15305164319248826, 'f1': 0.09183098591549295, 'number': 1065} | 0.0359 | 0.1029 | 0.0532 | 0.1843 |
- | 1.8918 | 2.0 | 20 | 1.8488 | {'precision': 0.02769385699899295, 'recall': 0.06798516687268233, 'f1': 0.03935599284436494, 'number': 809} | {'precision': 0.003703703703703704, 'recall': 0.008403361344537815, 'f1': 0.0051413881748071984, 'number': 119} | {'precision': 0.07554585152838428, 'recall': 0.1624413145539906, 'f1': 0.10312965722801788, 'number': 1065} | 0.0504 | 0.1149 | 0.0700 | 0.2606 |
- | 1.8117 | 3.0 | 30 | 1.7797 | {'precision': 0.02564102564102564, 'recall': 0.0580964153275649, 'f1': 0.03557910673732021, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.0943496801705757, 'recall': 0.16619718309859155, 'f1': 0.120367222033322, 'number': 1065} | 0.0601 | 0.1124 | 0.0783 | 0.3026 |
- | 1.7441 | 4.0 | 40 | 1.7198 | {'precision': 0.019028871391076115, 'recall': 0.03584672435105068, 'f1': 0.024860694384912133, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.12127512127512127, 'recall': 0.1643192488262911, 'f1': 0.13955342902711323, 'number': 1065} | 0.0686 | 0.1024 | 0.0822 | 0.3324 |
- | 1.6818 | 5.0 | 50 | 1.6641 | {'precision': 0.0196078431372549, 'recall': 0.03337453646477132, 'f1': 0.024702653247941447, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.15128593040847202, 'recall': 0.18779342723004694, 'f1': 0.16757436112274823, 'number': 1065} | 0.0841 | 0.1139 | 0.0968 | 0.3537 |
- | 1.6335 | 6.0 | 60 | 1.6097 | {'precision': 0.02643171806167401, 'recall': 0.04449938195302843, 'f1': 0.03316444035006909, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.18782870022539444, 'recall': 0.2347417840375587, 'f1': 0.20868113522537562, 'number': 1065} | 0.1062 | 0.1435 | 0.1221 | 0.3821 |
- | 1.5742 | 7.0 | 70 | 1.5578 | {'precision': 0.033409263477600606, 'recall': 0.054388133498145856, 'f1': 0.04139228598306679, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.22088068181818182, 'recall': 0.292018779342723, 'f1': 0.2515163768701982, 'number': 1065} | 0.1303 | 0.1781 | 0.1505 | 0.4189 |
- | 1.5302 | 8.0 | 80 | 1.5083 | {'precision': 0.0456656346749226, 'recall': 0.07292954264524104, 'f1': 0.05616373155640171, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.24610169491525424, 'recall': 0.3408450704225352, 'f1': 0.2858267716535433, 'number': 1065} | 0.1525 | 0.2117 | 0.1773 | 0.4559 |
- | 1.4774 | 9.0 | 90 | 1.4639 | {'precision': 0.05325914149443561, 'recall': 0.08281829419035847, 'f1': 0.0648282535074988, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.28843537414965986, 'recall': 0.39812206572769954, 'f1': 0.33451676528599605, 'number': 1065} | 0.1800 | 0.2464 | 0.2080 | 0.4889 |
- | 1.4389 | 10.0 | 100 | 1.4263 | {'precision': 0.059574468085106386, 'recall': 0.0865265760197775, 'f1': 0.07056451612903225, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.32748948106591863, 'recall': 0.4384976525821596, 'f1': 0.3749498193496587, 'number': 1065} | 0.2065 | 0.2694 | 0.2338 | 0.5120 |
- | 1.4007 | 11.0 | 110 | 1.3933 | {'precision': 0.07123534715960325, 'recall': 0.09765142150803462, 'f1': 0.08237747653806049, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.360773085182534, 'recall': 0.4732394366197183, 'f1': 0.40942323314378554, 'number': 1065} | 0.2326 | 0.2925 | 0.2592 | 0.5334 |
- | 1.3866 | 12.0 | 120 | 1.3665 | {'precision': 0.09439252336448598, 'recall': 0.12484548825710753, 'f1': 0.10750399148483236, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.38648052902277735, 'recall': 0.49389671361502346, 'f1': 0.4336356141797197, 'number': 1065} | 0.2579 | 0.3146 | 0.2835 | 0.5428 |
- | 1.3482 | 13.0 | 130 | 1.3469 | {'precision': 0.10622009569377991, 'recall': 0.13720642768850433, 'f1': 0.11974110032362459, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.40044411547002223, 'recall': 0.507981220657277, 'f1': 0.44784768211920534, 'number': 1065} | 0.2721 | 0.3271 | 0.2971 | 0.5537 |
- | 1.3355 | 14.0 | 140 | 1.3345 | {'precision': 0.11078431372549019, 'recall': 0.13967861557478367, 'f1': 0.12356478950246036, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.4114114114114114, 'recall': 0.5145539906103287, 'f1': 0.4572382144347101, 'number': 1065} | 0.2810 | 0.3317 | 0.3043 | 0.5588 |
- | 1.3066 | 15.0 | 150 | 1.3293 | {'precision': 0.11451135241855874, 'recall': 0.1433868974042027, 'f1': 0.12733260153677278, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.41704374057315236, 'recall': 0.5192488262910798, 'f1': 0.46256796319531585, 'number': 1065} | 0.2860 | 0.3357 | 0.3089 | 0.5623 |
+ | Training Loss | Epoch | Step | Validation Loss | Answer | Header | Question | Overall Precision | Overall Recall | Overall F1 | Overall Accuracy |
+ |:-------------:|:-----:|:----:|:---------------:|:--------------------------------------------------:|:--------------------------------------------------:|:--------------------------------------------------:|:-----------------:|:--------------:|:----------:|:----------------:|
+ | 1.9048 | 1.0 | 10 | 1.8492 | {'precision': 0.02683982683982684, 'recall': 0.07663782447466007, 'f1': 0.039756332157742866, 'number': 809} | {'precision': 0.003424657534246575, 'recall': 0.008403361344537815, 'f1': 0.004866180048661801, 'number': 119} | {'precision': 0.08558262014483213, 'recall': 0.12206572769953052, 'f1': 0.10061919504643962, 'number': 1065} | 0.0468 | 0.0968 | 0.0631 | 0.2625 |
+ | 1.8261 | 2.0 | 20 | 1.7805 | {'precision': 0.02488425925925926, 'recall': 0.05315203955500618, 'f1': 0.03389830508474576, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.11639344262295082, 'recall': 0.13333333333333333, 'f1': 0.12428884026258205, 'number': 1065} | 0.0620 | 0.0928 | 0.0744 | 0.3314 |
+ | 1.7557 | 3.0 | 30 | 1.7197 | {'precision': 0.018808777429467086, 'recall': 0.029666254635352288, 'f1': 0.02302158273381295, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.15336134453781514, 'recall': 0.13708920187793427, 'f1': 0.14476945959345563, 'number': 1065} | 0.0763 | 0.0853 | 0.0805 | 0.3579 |
+ | 1.7002 | 4.0 | 40 | 1.6648 | {'precision': 0.019029495718363463, 'recall': 0.024721878862793572, 'f1': 0.02150537634408602, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.19602977667493796, 'recall': 0.14835680751173708, 'f1': 0.16889363976483165, 'number': 1065} | 0.0959 | 0.0893 | 0.0925 | 0.3775 |
+ | 1.645 | 5.0 | 50 | 1.6121 | {'precision': 0.019801980198019802, 'recall': 0.024721878862793572, 'f1': 0.021990104452996154, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.22172452407614782, 'recall': 0.18591549295774648, 'f1': 0.20224719101123598, 'number': 1065} | 0.1146 | 0.1094 | 0.1119 | 0.4091 |
+ | 1.5951 | 6.0 | 60 | 1.5596 | {'precision': 0.029411764705882353, 'recall': 0.037082818294190356, 'f1': 0.032804811372334604, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.23694779116465864, 'recall': 0.2215962441314554, 'f1': 0.2290150412421155, 'number': 1065} | 0.1319 | 0.1335 | 0.1327 | 0.4421 |
+ | 1.5418 | 7.0 | 70 | 1.5109 | {'precision': 0.040755467196819085, 'recall': 0.05067985166872682, 'f1': 0.04517906336088154, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.27926267281105993, 'recall': 0.28450704225352114, 'f1': 0.2818604651162791, 'number': 1065} | 0.1645 | 0.1726 | 0.1685 | 0.4719 |
+ | 1.4954 | 8.0 | 80 | 1.4653 | {'precision': 0.050359712230215826, 'recall': 0.06056860321384425, 'f1': 0.05499438832772166, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.3016421780466724, 'recall': 0.3276995305164319, 'f1': 0.31413141314131415, 'number': 1065} | 0.1869 | 0.1997 | 0.1931 | 0.4973 |
+ | 1.4558 | 9.0 | 90 | 1.4245 | {'precision': 0.054140127388535034, 'recall': 0.0630407911001236, 'f1': 0.05825242718446602, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.3177966101694915, 'recall': 0.352112676056338, 'f1': 0.3340757238307349, 'number': 1065} | 0.2008 | 0.2137 | 0.2070 | 0.5168 |
+ | 1.4126 | 10.0 | 100 | 1.3893 | {'precision': 0.07432432432432433, 'recall': 0.0815822002472188, 'f1': 0.07778432527990571, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.33669185558354325, 'recall': 0.37652582159624415, 'f1': 0.3554964539007092, 'number': 1065} | 0.2246 | 0.2343 | 0.2294 | 0.5339 |
+ | 1.3759 | 11.0 | 110 | 1.3592 | {'precision': 0.08333333333333333, 'recall': 0.0865265760197775, 'f1': 0.08489993935718616, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.3618807724601176, 'recall': 0.40469483568075115, 'f1': 0.38209219858156024, 'number': 1065} | 0.2467 | 0.2514 | 0.2490 | 0.5470 |
+ | 1.3663 | 12.0 | 120 | 1.3358 | {'precision': 0.08531994981179424, 'recall': 0.08405438813349815, 'f1': 0.08468244084682441, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.37638062871707734, 'recall': 0.415962441314554, 'f1': 0.39518287243532557, 'number': 1065} | 0.2589 | 0.2564 | 0.2576 | 0.5545 |
+ | 1.3323 | 13.0 | 130 | 1.3192 | {'precision': 0.0916030534351145, 'recall': 0.08899876390605686, 'f1': 0.090282131661442, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.38649789029535864, 'recall': 0.4300469483568075, 'f1': 0.40711111111111115, 'number': 1065} | 0.2689 | 0.2659 | 0.2674 | 0.5635 |
+ | 1.3268 | 14.0 | 140 | 1.3094 | {'precision': 0.09585492227979274, 'recall': 0.09147095179233622, 'f1': 0.09361163820366855, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.3974358974358974, 'recall': 0.43661971830985913, 'f1': 0.4161073825503355, 'number': 1065} | 0.2775 | 0.2704 | 0.2740 | 0.5671 |
+ | 1.2988 | 15.0 | 150 | 1.3057 | {'precision': 0.09480519480519481, 'recall': 0.09023485784919653, 'f1': 0.09246358454718177, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.4032534246575342, 'recall': 0.4422535211267606, 'f1': 0.4218540080609046, 'number': 1065} | 0.2807 | 0.2730 | 0.2768 | 0.5691 |
 
 
 ### Framework versions
 
 - Transformers 4.31.0
- - Pytorch 2.0.1+cu118
+ - Pytorch 2.0.1+cu117
 - Datasets 2.14.4
 - Tokenizers 0.13.3
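For orientation, here is a minimal sketch of loading a LayoutLM token-classification checkpoint under the versions pinned above (transformers 4.31.0, torch 2.0.1). The repo id below is a placeholder, since this card's own repository id is not shown in the diff, and note that LayoutLM also expects token bounding boxes (`bbox`) at inference time.

```python
# Sketch only: load a LayoutLM checkpoint for token classification.
# Replace repo_id with this fine-tuned model's repository id (placeholder shown).
from transformers import AutoModelForTokenClassification, AutoTokenizer

repo_id = "microsoft/layoutlm-base-uncased"  # placeholder: base model, not this checkpoint
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForTokenClassification.from_pretrained(repo_id)

# For the fine-tuned checkpoint, the FUNSD label names should appear here.
print(model.config.id2label)
```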
logs/events.out.tfevents.1691751886.56f85528113b.15684.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:397f40fc982d06a5c84fa4ec40691a98e9b6ca84112c9dd1e74a983706fda4a4
- size 14072
+ oid sha256:4de90d603379769284dba2bb266204a6e797e7783c46c7e0e0325497b29eaa9d
+ size 14426
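The file above is a Git LFS pointer to the TensorBoard event log written during training. As a hedged aside, one way to inspect such a log after downloading it is TensorBoard's EventAccumulator; the directory path and scalar tag name below are assumptions:

```python
# Illustrative: read scalar curves from a downloaded TensorBoard event file.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("logs")  # directory containing the events.out.tfevents.* file
acc.Reload()
print(acc.Tags()["scalars"])    # available tags, e.g. train/loss, eval/loss (assumed names)
for event in acc.Scalars("eval/loss"):
    print(event.step, event.value)
```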
tokenizer.json CHANGED
@@ -1,21 +1,7 @@
  {
    "version": "1.0",
-   "truncation": {
-     "direction": "Right",
-     "max_length": 512,
-     "strategy": "LongestFirst",
-     "stride": 0
-   },
-   "padding": {
-     "strategy": {
-       "Fixed": 512
-     },
-     "direction": "Right",
-     "pad_to_multiple_of": null,
-     "pad_id": 0,
-     "pad_type_id": 0,
-     "pad_token": "[PAD]"
-   },
+   "truncation": null,
+   "padding": null,
    "added_tokens": [
      {
        "id": 0,
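This change clears the serialized truncation and padding settings; the tokenizers library writes `null` for both fields when no strategy is enabled. A minimal sketch of how the two states arise, assuming a local `tokenizer.json` (the file path is illustrative):

```python
# Illustrative: how truncation/padding strategies end up in tokenizer.json.
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")

# Old state in this diff: fixed 512-token truncation and padding to length 512.
tok.enable_truncation(max_length=512)
tok.enable_padding(length=512, pad_id=0, pad_token="[PAD]")

# New state: both strategies cleared, so "truncation" and "padding" serialize as null.
tok.no_truncation()
tok.no_padding()
tok.save("tokenizer.json")
```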