AKASHR committed on
Commit 01660b0
1 Parent(s): e5a0aa0

End of training

README.md ADDED
@@ -0,0 +1,130 @@
+ ---
+ tags:
+ - generated_from_trainer
+ model-index:
+ - name: layoutlm-donut-own
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # layoutlm-donut-own
+
+ This model is a fine-tuned version of [microsoft/layoutlm-base-uncased](https://huggingface.co/microsoft/layoutlm-base-uncased) on an unknown dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 2.3438
+ - Ban: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 1}
+ - Eader:client: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 44}
+ - Eader:client Tax Id: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 44}
+ - Eader:iban: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 44}
+ - Eader:invoice Date: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 44}
+ - Eader:invoice No: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 44}
+ - Eader:seller: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 44}
+ - Eader:seller Tax Id: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 43}
+ - Eller: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 1}
+ - Eller Tax Id: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 1}
+ - Lient: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 1}
+ - Lient Tax Id: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 1}
+ - Nvoice Date: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 1}
+ - Nvoice No: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 1}
+ - Otal Gross Worth: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 1}
+ - Otal Net Worth: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 1}
+ - Otal Vat: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 1}
+ - Tem Desc: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 3}
+ - Tem Gross Worth: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 3}
+ - Tem Net Price: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 3}
+ - Tem Net Worth: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 3}
+ - Tem Qty: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 3}
+ - Tem Vat: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 3}
+ - Tems Row1:item Desc: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 44}
+ - Tems Row1:item Gross Worth: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 44}
+ - Tems Row1:item Net Price: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 44}
+ - Tems Row1:item Net Worth: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 43}
+ - Tems Row1:item Qty: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 45}
+ - Tems Row1:item Vat: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 43}
+ - Tems Row1:seller Tax Id: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 1}
+ - Tems Row2:item Desc: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 39}
+ - Tems Row2:item Gross Worth: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 39}
+ - Tems Row2:item Net Price: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 38}
+ - Tems Row2:item Net Worth: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 39}
+ - Tems Row2:item Qty: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 40}
+ - Tems Row2:item Vat: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 38}
+ - Tems Row3:item Desc: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 32}
+ - Tems Row3:item Gross Worth: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 32}
+ - Tems Row3:item Net Price: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 32}
+ - Tems Row3:item Net Worth: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 32}
+ - Tems Row3:item Qty: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 33}
+ - Tems Row3:item Vat: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 31}
+ - Tems Row4:item Desc: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 26}
+ - Tems Row4:item Gross Worth: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 26}
+ - Tems Row4:item Net Price: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 26}
+ - Tems Row4:item Net Worth: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 26}
+ - Tems Row4:item Qty: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 27}
+ - Tems Row4:item Vat: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 25}
+ - Tems Row5:item Desc: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 21}
+ - Tems Row5:item Gross Worth: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 21}
+ - Tems Row5:item Net Price: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 21}
+ - Tems Row5:item Net Worth: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 21}
+ - Tems Row5:item Qty: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 22}
+ - Tems Row5:item Vat: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 20}
+ - Tems Row6:item Desc: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 17}
+ - Tems Row6:item Gross Worth: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 17}
+ - Tems Row6:item Net Price: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 17}
+ - Tems Row6:item Net Worth: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 17}
+ - Tems Row6:item Qty: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 17}
+ - Tems Row6:item Vat: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 17}
+ - Tems Row7:item Desc: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 11}
+ - Tems Row7:item Gross Worth: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 11}
+ - Tems Row7:item Net Price: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 11}
+ - Tems Row7:item Net Worth: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 11}
+ - Tems Row7:item Qty: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 11}
+ - Tems Row7:item Vat: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 10}
+ - Ther: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 609}
+ - Ummary:total Gross Worth: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 44}
+ - Ummary:total Net Worth: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 44}
+ - Ummary:total Vat: {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 44}
+ - Overall Precision: 0.0
+ - Overall Recall: 0.0
+ - Overall F1: 0.0
+ - Overall Accuracy: 0.5689
+
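+ A minimal inference sketch (the checkpoint path and example image below are placeholders; the processor class follows the `preprocessor_config.json` shipped alongside this model):
+
+ ```python
+ from PIL import Image
+ from transformers import LayoutLMForTokenClassification, LayoutLMv2Processor
+
+ checkpoint = "path/to/layoutlm-donut-own"  # placeholder: local dir or repo id holding this checkpoint
+
+ # The repo's preprocessor_config.json configures LayoutLMv2Processor with apply_ocr=true,
+ # so the processor runs Tesseract OCR to obtain words and normalized bounding boxes.
+ processor = LayoutLMv2Processor.from_pretrained(checkpoint)
+ model = LayoutLMForTokenClassification.from_pretrained(checkpoint)
+
+ image = Image.open("invoice.png").convert("RGB")  # placeholder document image
+ encoding = processor(image, return_tensors="pt", truncation=True)
+ encoding.pop("image", None)  # LayoutLM (v1) takes text + boxes only, no pixel input
+
+ outputs = model(**encoding)
+ predicted_ids = outputs.logits.argmax(-1).squeeze().tolist()
+ predicted_labels = [model.config.id2label[i] for i in predicted_ids]
+ ```
+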
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 3e-05
+ - train_batch_size: 16
+ - eval_batch_size: 8
+ - seed: 42
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: linear
+ - num_epochs: 2
+
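+ The Adam betas and epsilon listed above are the `transformers` defaults, so a Trainer setup mirroring this card could look roughly like the sketch below (the output directory, model, and datasets are placeholders, not taken from this repo):
+
+ ```python
+ from transformers import TrainingArguments, Trainer
+
+ # Mirrors the hyperparameter list in this card; adam_beta1/2 and adam_epsilon keep their defaults.
+ training_args = TrainingArguments(
+     output_dir="layoutlm-donut-own",   # placeholder output directory
+     learning_rate=3e-5,
+     per_device_train_batch_size=16,
+     per_device_eval_batch_size=8,
+     seed=42,
+     lr_scheduler_type="linear",
+     num_train_epochs=2,
+ )
+
+ # trainer = Trainer(model=model, args=training_args,
+ #                   train_dataset=train_dataset, eval_dataset=eval_dataset,
+ #                   compute_metrics=compute_metrics)
+ # trainer.train()
+ ```
+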
+ ### Training results
+
+ | Training Loss | Epoch | Step | Validation Loss | Overall Precision | Overall Recall | Overall F1 | Overall Accuracy |
+ |:-------------:|:-----:|:----:|:---------------:|:-----------------:|:--------------:|:----------:|:----------------:|
+ | 3.6109        | 1.0   | 7    | 2.7573          | 0.0               | 0.0            | 0.0        | 0.5689           |
+ | 2.5323        | 2.0   | 14   | 2.3438          | 0.0               | 0.0            | 0.0        | 0.5689           |
+
+ Per-entity precision, recall, and F1 were 0.0 for every label at both epochs; the entity support counts are the same as those listed in the evaluation results above.
+
+
+ ### Framework versions
+
+ - Transformers 4.28.0
+ - Pytorch 2.0.1+cu117
+ - Datasets 2.12.0
+ - Tokenizers 0.13.3
logs/events.out.tfevents.1685537997.DESKTOP-NAHDDBT.128.2 CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:03c96c7a8a568f90fde3f08d38c7f41e7c404d9bd07268854203ec7be53c5e1c
- size 10396
+ oid sha256:6cf4f33e2b19a45e30261a6500ec7ea9d2817fe1168733bb5a255169b231d028
+ size 11393
preprocessor_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+   "apply_ocr": true,
+   "do_resize": true,
+   "feature_extractor_type": "LayoutLMv2FeatureExtractor",
+   "image_processor_type": "LayoutLMv2ImageProcessor",
+   "ocr_lang": null,
+   "processor_class": "LayoutLMv2Processor",
+   "resample": 2,
+   "size": {
+     "height": 224,
+     "width": 224
+   },
+   "tesseract_config": ""
+ }
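The `apply_ocr` and `size` settings above determine what the processor feeds the model; a brief sketch of loading this image processor on its own (the directory name and sample image are placeholders):

```python
from PIL import Image
from transformers import LayoutLMv2ImageProcessor

# The directory just needs to contain the preprocessor_config.json shown above.
image_processor = LayoutLMv2ImageProcessor.from_pretrained("path/to/layoutlm-donut-own")

image = Image.open("invoice.png").convert("RGB")
features = image_processor(image, return_tensors="pt")

# With apply_ocr=true, Tesseract runs on the page and the output contains the
# 224x224 pixel_values plus the recognized words and 0-1000 normalized boxes.
words, boxes = features["words"][0], features["boxes"][0]
```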
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "cls_token": "[CLS]",
+   "mask_token": "[MASK]",
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "unk_token": "[UNK]"
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,38 @@
+ {
+   "additional_special_tokens": null,
+   "apply_ocr": false,
+   "clean_up_tokenization_spaces": true,
+   "cls_token": "[CLS]",
+   "cls_token_box": [
+     0,
+     0,
+     0,
+     0
+   ],
+   "do_basic_tokenize": true,
+   "do_lower_case": true,
+   "mask_token": "[MASK]",
+   "model_max_length": 512,
+   "never_split": null,
+   "only_label_first_subword": true,
+   "pad_token": "[PAD]",
+   "pad_token_box": [
+     0,
+     0,
+     0,
+     0
+   ],
+   "pad_token_label": -100,
+   "processor_class": "LayoutLMv2Processor",
+   "sep_token": "[SEP]",
+   "sep_token_box": [
+     1000,
+     1000,
+     1000,
+     1000
+   ],
+   "strip_accents": null,
+   "tokenize_chinese_chars": true,
+   "tokenizer_class": "LayoutLMv2Tokenizer",
+   "unk_token": "[UNK]"
+ }
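The box and label conventions in this tokenizer config show up directly when encoding words with bounding boxes; a small sketch (the words, boxes, and label ids are made up for illustration, and the directory is a placeholder):

```python
from transformers import LayoutLMv2Tokenizer

tokenizer = LayoutLMv2Tokenizer.from_pretrained("path/to/layoutlm-donut-own")  # placeholder dir

words = ["Invoice", "no:", "12345"]                                    # made-up example
boxes = [[48, 40, 150, 60], [160, 40, 200, 60], [210, 40, 300, 60]]    # 0-1000 normalized boxes
word_labels = [1, 1, 2]                                                # made-up label ids

enc = tokenizer(words, boxes=boxes, word_labels=word_labels,
                padding="max_length", max_length=16, truncation=True)

# [CLS] is assigned cls_token_box [0, 0, 0, 0], [SEP] gets sep_token_box [1000, 1000, 1000, 1000],
# padding positions get pad_token_box [0, 0, 0, 0] with label pad_token_label = -100, and only the
# first subword of each word keeps a real label (only_label_first_subword).
print(enc["bbox"])
print(enc["labels"])
```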
vocab.txt ADDED
The diff for this file is too large to render. See raw diff