hmart824 committed on
Commit c6b289b (1 parent: 3961f93)

End of training

README.md ADDED
@@ -0,0 +1,87 @@
---
library_name: transformers
license: mit
base_model: microsoft/layoutlm-base-uncased
tags:
- generated_from_trainer
model-index:
- name: model
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# model

This model is a fine-tuned version of [microsoft/layoutlm-base-uncased](https://huggingface.co/microsoft/layoutlm-base-uncased) on an unspecified dataset.
It achieves the following results on the evaluation set (a sketch of how metrics in this format are computed follows the list):
- Loss: 0.0428
- B-adress: {'precision': 0.9269102990033222, 'recall': 0.9370277078085643, 'f1': 0.9319415448851774, 'number': 1191}
- B-name: {'precision': 0.9577039274924471, 'recall': 0.9378698224852071, 'f1': 0.9476831091180866, 'number': 338}
- Gst no: {'precision': 0.9689922480620154, 'recall': 0.984251968503937, 'f1': 0.9765625, 'number': 127}
- Invoice no: {'precision': 0.9583333333333334, 'recall': 0.9484536082474226, 'f1': 0.9533678756476685, 'number': 97}
- Order date: {'precision': 0.967741935483871, 'recall': 0.9836065573770492, 'f1': 0.975609756097561, 'number': 122}
- Order id: {'precision': 0.9618320610687023, 'recall': 0.984375, 'f1': 0.9729729729729729, 'number': 128}
- S-adress: {'precision': 0.9809656453110492, 'recall': 0.9957587181903864, 'f1': 0.988306828811974, 'number': 2122}
- S-name: {'precision': 0.9888268156424581, 'recall': 0.9943820224719101, 'f1': 0.9915966386554622, 'number': 534}
- Total gross: {'precision': 0.9354838709677419, 'recall': 0.9206349206349206, 'f1': 0.9279999999999999, 'number': 63}
- Total net: {'precision': 0.9253731343283582, 'recall': 0.9763779527559056, 'f1': 0.9501915708812261, 'number': 127}
- Overall Precision: 0.9633
- Overall Recall: 0.9738
- Overall F1: 0.9685
- Overall Accuracy: 0.9898
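
The per-label entries above follow the output format of a seqeval-style evaluation: precision, recall, F1, and `number` (the count of gold entities of that type). The evaluation code is not part of this repository; the snippet below is a minimal sketch, assuming the `evaluate` library with the `seqeval` metric, of how metrics in this format are typically produced for token classification.

```python
# Minimal sketch (assumption: a seqeval-style metric via the `evaluate` library;
# the actual compute_metrics function used for this card is not included here).
import evaluate

seqeval = evaluate.load("seqeval")

# Toy IOB2-tagged sequences standing in for the real predictions and references.
predictions = [["B-PER", "I-PER", "O", "B-LOC"]]
references = [["B-PER", "I-PER", "O", "B-ORG"]]

results = seqeval.compute(predictions=predictions, references=references)
# `results` holds one dict per entity type, shaped like
# {'precision': ..., 'recall': ..., 'f1': ..., 'number': ...},
# plus overall_precision, overall_recall, overall_f1 and overall_accuracy.
print(results)
```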

## Model description

LayoutLM base (uncased) fine-tuned for token classification over the 11 labels defined in `config.json`: O, Invoice no, Order id, Order date, GST no, Total net, Total gross, S-name, B-name, S-adress, B-adress. In practice this means extracting invoice fields (invoice and order numbers, order date, GST number, net and gross totals, and two sets of names and addresses) from OCR'd words and their bounding boxes.

## Intended uses & limitations

More information needed
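
Pending fuller documentation, the snippet below sketches one way to run the checkpoint for token classification on OCR output. It is illustrative only: the repository id (`hmart824/model`) and the example words and boxes are assumptions, and since this commit does not add tokenizer files, the tokenizer is loaded from the base checkpoint instead.

```python
# Hypothetical inference sketch; the repo id and the OCR words/boxes are placeholders.
import torch
from transformers import LayoutLMTokenizerFast, LayoutLMForTokenClassification

repo_id = "hmart824/model"  # assumed; adjust to the actual repository id
tokenizer = LayoutLMTokenizerFast.from_pretrained("microsoft/layoutlm-base-uncased")
model = LayoutLMForTokenClassification.from_pretrained(repo_id)
model.eval()

# Words and their 0-1000 normalized bounding boxes, e.g. from an OCR engine.
words = ["Invoice", "No:", "INV-001"]
word_boxes = [[48, 36, 112, 52], [118, 36, 150, 52], [156, 36, 230, 52]]

encoding = tokenizer(words, is_split_into_words=True, return_tensors="pt")
# LayoutLM expects one box per token: repeat each word's box for its sub-tokens
# and use a dummy box for the special tokens ([CLS]/[SEP]).
bbox = [[0, 0, 0, 0] if word_idx is None else word_boxes[word_idx]
        for word_idx in encoding.word_ids(0)]
encoding["bbox"] = torch.tensor([bbox])

with torch.no_grad():
    logits = model(**encoding).logits
predicted = [model.config.id2label[i] for i in logits.argmax(-1)[0].tolist()]
print(list(zip(encoding.word_ids(0), predicted)))
```

Sub-token predictions still need to be aggregated back to word level, for example by keeping the label of each word's first sub-token.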

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training (a `TrainingArguments` sketch reproducing them follows the list):
- learning_rate: 5e-05
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 10
- num_epochs: 15
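
The training script itself is not included in this commit. A minimal `TrainingArguments` sketch matching the values above might look as follows; the output directory name and the Trainer/dataset/metric wiring are assumptions, not part of this card.

```python
# Sketch only: reproduces the listed hyperparameters; the Trainer, dataset,
# data collator and compute_metrics wiring are omitted.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="model",          # assumed output directory name
    learning_rate=5e-5,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=8,
    seed=42,
    lr_scheduler_type="linear",
    warmup_steps=10,
    num_train_epochs=15,
    adam_beta1=0.9,              # Adam betas and epsilon as listed above
    adam_beta2=0.999,
    adam_epsilon=1e-8,
)
```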

### Training results

| Training Loss | Epoch | Step | Validation Loss | B-adress | B-name | Gst no | Invoice no | Order date | Order id | S-adress | S-name | Total gross | Total net | Overall Precision | Overall Recall | Overall F1 | Overall Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|:------:|:------:|:----------:|:----------:|:--------:|:--------:|:------:|:-----------:|:---------:|:-----------------:|:--------------:|:----------:|:----------------:|
| 1.1762 | 1.0 | 19 | 0.6066 | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 1191} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 338} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 127} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 97} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 122} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 128} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 2122} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 534} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 63} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 127} | 0.0 | 0.0 | 0.0 | 0.8194 |
| 0.4823 | 2.0 | 38 | 0.3283 | {'precision': 0.5819354838709677, 'recall': 0.3786733837111671, 'f1': 0.4587995930824008, 'number': 1191} | {'precision': 0.8347107438016529, 'recall': 0.2988165680473373, 'f1': 0.440087145969499, 'number': 338} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 127} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 97} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 122} | {'precision': 1.0, 'recall': 0.03125, 'f1': 0.06060606060606061, 'number': 128} | {'precision': 0.8914764788076386, 'recall': 0.9019792648444863, 'f1': 0.8966971187631764, 'number': 2122} | {'precision': 0.9348958333333334, 'recall': 0.6722846441947565, 'f1': 0.7821350762527233, 'number': 534} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 63} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 127} | 0.8245 | 0.5834 | 0.6833 | 0.9098 |
| 0.2723 | 3.0 | 57 | 0.1988 | {'precision': 0.6759189797449362, 'recall': 0.7565071368597817, 'f1': 0.7139461172741679, 'number': 1191} | {'precision': 0.7060301507537688, 'recall': 0.8313609467455622, 'f1': 0.7635869565217391, 'number': 338} | {'precision': 0.9259259259259259, 'recall': 0.7874015748031497, 'f1': 0.851063829787234, 'number': 127} | {'precision': 0.922077922077922, 'recall': 0.7319587628865979, 'f1': 0.8160919540229885, 'number': 97} | {'precision': 0.9066666666666666, 'recall': 0.5573770491803278, 'f1': 0.6903553299492385, 'number': 122} | {'precision': 0.9333333333333333, 'recall': 0.765625, 'f1': 0.8412017167381974, 'number': 128} | {'precision': 0.9612778315585673, 'recall': 0.9359095193213949, 'f1': 0.9484240687679083, 'number': 2122} | {'precision': 0.9844054580896686, 'recall': 0.9456928838951311, 'f1': 0.9646609360076408, 'number': 534} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 63} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 127} | 0.8578 | 0.8270 | 0.8421 | 0.9492 |
| 0.1825 | 4.0 | 76 | 0.1596 | {'precision': 0.6635916359163592, 'recall': 0.9059613769941226, 'f1': 0.7660631877884273, 'number': 1191} | {'precision': 0.7792207792207793, 'recall': 0.8875739644970414, 'f1': 0.8298755186721992, 'number': 338} | {'precision': 0.954954954954955, 'recall': 0.8346456692913385, 'f1': 0.8907563025210083, 'number': 127} | {'precision': 0.8631578947368421, 'recall': 0.845360824742268, 'f1': 0.8541666666666666, 'number': 97} | {'precision': 0.8725490196078431, 'recall': 0.7295081967213115, 'f1': 0.7946428571428572, 'number': 122} | {'precision': 0.9519230769230769, 'recall': 0.7734375, 'f1': 0.853448275862069, 'number': 128} | {'precision': 0.9807881773399014, 'recall': 0.9382657869934025, 'f1': 0.9590558766859345, 'number': 2122} | {'precision': 0.9865900383141762, 'recall': 0.9644194756554307, 'f1': 0.975378787878788, 'number': 534} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 63} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 127} | 0.8565 | 0.8787 | 0.8675 | 0.9558 |
| 0.1356 | 5.0 | 95 | 0.1106 | {'precision': 0.7638888888888888, 'recall': 0.9235936188077246, 'f1': 0.8361839604713037, 'number': 1191} | {'precision': 0.9381107491856677, 'recall': 0.8520710059171598, 'f1': 0.8930232558139536, 'number': 338} | {'precision': 0.9375, 'recall': 0.8267716535433071, 'f1': 0.8786610878661089, 'number': 127} | {'precision': 0.9222222222222223, 'recall': 0.8556701030927835, 'f1': 0.8877005347593583, 'number': 97} | {'precision': 0.8584070796460177, 'recall': 0.7950819672131147, 'f1': 0.825531914893617, 'number': 122} | {'precision': 0.9528301886792453, 'recall': 0.7890625, 'f1': 0.8632478632478633, 'number': 128} | {'precision': 0.9829706717123936, 'recall': 0.9792648444863337, 'f1': 0.9811142587346553, 'number': 2122} | {'precision': 0.9961685823754789, 'recall': 0.9737827715355806, 'f1': 0.9848484848484848, 'number': 534} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 63} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 127} | 0.9101 | 0.9016 | 0.9058 | 0.9685 |
| 0.0936 | 6.0 | 114 | 0.0877 | {'precision': 0.9464094319399786, 'recall': 0.7413937867338372, 'f1': 0.8314500941619587, 'number': 1191} | {'precision': 0.9203539823008849, 'recall': 0.9230769230769231, 'f1': 0.9217134416543575, 'number': 338} | {'precision': 0.9649122807017544, 'recall': 0.8661417322834646, 'f1': 0.9128630705394191, 'number': 127} | {'precision': 0.9325842696629213, 'recall': 0.8556701030927835, 'f1': 0.8924731182795699, 'number': 97} | {'precision': 0.9145299145299145, 'recall': 0.8770491803278688, 'f1': 0.895397489539749, 'number': 122} | {'precision': 0.943089430894309, 'recall': 0.90625, 'f1': 0.9243027888446216, 'number': 128} | {'precision': 0.9736111111111111, 'recall': 0.9910461828463714, 'f1': 0.9822512844465203, 'number': 2122} | {'precision': 0.9925233644859813, 'recall': 0.9943820224719101, 'f1': 0.9934518241347053, 'number': 534} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 63} | {'precision': 0.8, 'recall': 0.031496062992125984, 'f1': 0.06060606060606061, 'number': 127} | 0.9624 | 0.8763 | 0.9173 | 0.9728 |
| 0.0711 | 7.0 | 133 | 0.0721 | {'precision': 0.8949652777777778, 'recall': 0.8656591099916037, 'f1': 0.8800682885189928, 'number': 1191} | {'precision': 0.8815426997245179, 'recall': 0.9467455621301775, 'f1': 0.9129814550641939, 'number': 338} | {'precision': 0.9609375, 'recall': 0.968503937007874, 'f1': 0.9647058823529412, 'number': 127} | {'precision': 0.9157894736842105, 'recall': 0.8969072164948454, 'f1': 0.9062499999999999, 'number': 97} | {'precision': 0.937007874015748, 'recall': 0.9754098360655737, 'f1': 0.9558232931726907, 'number': 122} | {'precision': 0.9538461538461539, 'recall': 0.96875, 'f1': 0.9612403100775193, 'number': 128} | {'precision': 0.9839849269901083, 'recall': 0.9844486333647502, 'f1': 0.9842167255594817, 'number': 2122} | {'precision': 0.9851024208566108, 'recall': 0.9906367041198502, 'f1': 0.9878618113912231, 'number': 534} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 63} | {'precision': 0.5061728395061729, 'recall': 0.6456692913385826, 'f1': 0.5674740484429066, 'number': 127} | 0.9350 | 0.9289 | 0.9319 | 0.9785 |
| 0.0543 | 8.0 | 152 | 0.0551 | {'precision': 0.9027661357921207, 'recall': 0.9042821158690176, 'f1': 0.9035234899328859, 'number': 1191} | {'precision': 0.9516616314199395, 'recall': 0.9319526627218935, 'f1': 0.9417040358744395, 'number': 338} | {'precision': 0.9612403100775194, 'recall': 0.9763779527559056, 'f1': 0.9687500000000001, 'number': 127} | {'precision': 0.9368421052631579, 'recall': 0.9175257731958762, 'f1': 0.9270833333333333, 'number': 97} | {'precision': 0.9596774193548387, 'recall': 0.9754098360655737, 'f1': 0.9674796747967479, 'number': 122} | {'precision': 0.9612403100775194, 'recall': 0.96875, 'f1': 0.9649805447470817, 'number': 128} | {'precision': 0.9831697054698457, 'recall': 0.9910461828463714, 'f1': 0.9870922318704529, 'number': 2122} | {'precision': 0.9925093632958801, 'recall': 0.9925093632958801, 'f1': 0.9925093632958801, 'number': 534} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 63} | {'precision': 0.5714285714285714, 'recall': 0.7559055118110236, 'f1': 0.6508474576271186, 'number': 127} | 0.9453 | 0.9439 | 0.9446 | 0.9831 |
| 0.04 | 9.0 | 171 | 0.0536 | {'precision': 0.8660508083140878, 'recall': 0.9445843828715366, 'f1': 0.9036144578313253, 'number': 1191} | {'precision': 0.9279538904899135, 'recall': 0.9526627218934911, 'f1': 0.9401459854014598, 'number': 338} | {'precision': 0.9618320610687023, 'recall': 0.9921259842519685, 'f1': 0.9767441860465117, 'number': 127} | {'precision': 0.92, 'recall': 0.9484536082474226, 'f1': 0.9340101522842639, 'number': 97} | {'precision': 0.967741935483871, 'recall': 0.9836065573770492, 'f1': 0.975609756097561, 'number': 122} | {'precision': 0.9618320610687023, 'recall': 0.984375, 'f1': 0.9729729729729729, 'number': 128} | {'precision': 0.9809390980939098, 'recall': 0.9943449575871819, 'f1': 0.9875965363912943, 'number': 2122} | {'precision': 0.9833024118738405, 'recall': 0.9925093632958801, 'f1': 0.9878844361602983, 'number': 534} | {'precision': 0.8947368421052632, 'recall': 0.2698412698412698, 'f1': 0.4146341463414634, 'number': 63} | {'precision': 0.6228571428571429, 'recall': 0.8582677165354331, 'f1': 0.7218543046357615, 'number': 127} | 0.9324 | 0.9645 | 0.9482 | 0.9838 |
| 0.034 | 10.0 | 190 | 0.0461 | {'precision': 0.9100572363041701, 'recall': 0.9345088161209067, 'f1': 0.9221209610604806, 'number': 1191} | {'precision': 0.9546827794561934, 'recall': 0.9349112426035503, 'f1': 0.9446935724962632, 'number': 338} | {'precision': 0.9767441860465116, 'recall': 0.9921259842519685, 'f1': 0.9843749999999999, 'number': 127} | {'precision': 0.9578947368421052, 'recall': 0.9381443298969072, 'f1': 0.9479166666666666, 'number': 97} | {'precision': 0.967741935483871, 'recall': 0.9836065573770492, 'f1': 0.975609756097561, 'number': 122} | {'precision': 0.9618320610687023, 'recall': 0.984375, 'f1': 0.9729729729729729, 'number': 128} | {'precision': 0.9809390980939098, 'recall': 0.9943449575871819, 'f1': 0.9875965363912943, 'number': 2122} | {'precision': 0.9962406015037594, 'recall': 0.9925093632958801, 'f1': 0.9943714821763602, 'number': 534} | {'precision': 0.9512195121951219, 'recall': 0.6190476190476191, 'f1': 0.7500000000000001, 'number': 63} | {'precision': 0.8273381294964028, 'recall': 0.905511811023622, 'f1': 0.8646616541353382, 'number': 127} | 0.9571 | 0.9664 | 0.9617 | 0.9879 |
| 0.0291 | 11.0 | 209 | 0.0451 | {'precision': 0.928390901432182, 'recall': 0.9252728799328296, 'f1': 0.926829268292683, 'number': 1191} | {'precision': 0.9358600583090378, 'recall': 0.9497041420118343, 'f1': 0.9427312775330395, 'number': 338} | {'precision': 0.9765625, 'recall': 0.984251968503937, 'f1': 0.9803921568627452, 'number': 127} | {'precision': 0.9484536082474226, 'recall': 0.9484536082474226, 'f1': 0.9484536082474226, 'number': 97} | {'precision': 0.967741935483871, 'recall': 0.9836065573770492, 'f1': 0.975609756097561, 'number': 122} | {'precision': 0.9545454545454546, 'recall': 0.984375, 'f1': 0.9692307692307692, 'number': 128} | {'precision': 0.9764325323475046, 'recall': 0.9957587181903864, 'f1': 0.9860009332711154, 'number': 2122} | {'precision': 0.9906716417910447, 'recall': 0.9943820224719101, 'f1': 0.9925233644859812, 'number': 534} | {'precision': 0.9482758620689655, 'recall': 0.873015873015873, 'f1': 0.9090909090909091, 'number': 63} | {'precision': 0.8413793103448276, 'recall': 0.9606299212598425, 'f1': 0.8970588235294117, 'number': 127} | 0.9579 | 0.9707 | 0.9643 | 0.9883 |
| 0.0268 | 12.0 | 228 | 0.0443 | {'precision': 0.9251471825063078, 'recall': 0.9235936188077246, 'f1': 0.9243697478991596, 'number': 1191} | {'precision': 0.9413489736070382, 'recall': 0.9497041420118343, 'f1': 0.9455081001472754, 'number': 338} | {'precision': 0.9689922480620154, 'recall': 0.984251968503937, 'f1': 0.9765625, 'number': 127} | {'precision': 0.9387755102040817, 'recall': 0.9484536082474226, 'f1': 0.9435897435897437, 'number': 97} | {'precision': 0.967741935483871, 'recall': 0.9836065573770492, 'f1': 0.975609756097561, 'number': 122} | {'precision': 0.9618320610687023, 'recall': 0.984375, 'f1': 0.9729729729729729, 'number': 128} | {'precision': 0.9826941066417212, 'recall': 0.9901036757775683, 'f1': 0.9863849765258215, 'number': 2122} | {'precision': 0.9888268156424581, 'recall': 0.9943820224719101, 'f1': 0.9915966386554622, 'number': 534} | {'precision': 0.9491525423728814, 'recall': 0.8888888888888888, 'f1': 0.9180327868852458, 'number': 63} | {'precision': 0.8857142857142857, 'recall': 0.9763779527559056, 'f1': 0.9288389513108615, 'number': 127} | 0.9611 | 0.9684 | 0.9648 | 0.9887 |
| 0.0236 | 13.0 | 247 | 0.0435 | {'precision': 0.9211175020542317, 'recall': 0.9412258606213266, 'f1': 0.9310631229235881, 'number': 1191} | {'precision': 0.954954954954955, 'recall': 0.9408284023668639, 'f1': 0.9478390461997019, 'number': 338} | {'precision': 0.984251968503937, 'recall': 0.984251968503937, 'f1': 0.984251968503937, 'number': 127} | {'precision': 0.9484536082474226, 'recall': 0.9484536082474226, 'f1': 0.9484536082474226, 'number': 97} | {'precision': 0.967741935483871, 'recall': 0.9836065573770492, 'f1': 0.975609756097561, 'number': 122} | {'precision': 0.9618320610687023, 'recall': 0.984375, 'f1': 0.9729729729729729, 'number': 128} | {'precision': 0.9809390980939098, 'recall': 0.9943449575871819, 'f1': 0.9875965363912943, 'number': 2122} | {'precision': 0.9925093632958801, 'recall': 0.9925093632958801, 'f1': 0.9925093632958801, 'number': 534} | {'precision': 0.9508196721311475, 'recall': 0.9206349206349206, 'f1': 0.9354838709677418, 'number': 63} | {'precision': 0.9185185185185185, 'recall': 0.9763779527559056, 'f1': 0.9465648854961832, 'number': 127} | 0.9621 | 0.9742 | 0.9681 | 0.9896 |
| 0.0214 | 14.0 | 266 | 0.0446 | {'precision': 0.9249793899422918, 'recall': 0.9420654911838791, 'f1': 0.9334442595673876, 'number': 1191} | {'precision': 0.9494047619047619, 'recall': 0.9437869822485208, 'f1': 0.9465875370919882, 'number': 338} | {'precision': 0.9689922480620154, 'recall': 0.984251968503937, 'f1': 0.9765625, 'number': 127} | {'precision': 0.9387755102040817, 'recall': 0.9484536082474226, 'f1': 0.9435897435897437, 'number': 97} | {'precision': 0.967741935483871, 'recall': 0.9836065573770492, 'f1': 0.975609756097561, 'number': 122} | {'precision': 0.9545454545454546, 'recall': 0.984375, 'f1': 0.9692307692307692, 'number': 128} | {'precision': 0.9764325323475046, 'recall': 0.9957587181903864, 'f1': 0.9860009332711154, 'number': 2122} | {'precision': 0.9833333333333333, 'recall': 0.9943820224719101, 'f1': 0.9888268156424581, 'number': 534} | {'precision': 0.8923076923076924, 'recall': 0.9206349206349206, 'f1': 0.90625, 'number': 63} | {'precision': 0.8732394366197183, 'recall': 0.9763779527559056, 'f1': 0.9219330855018587, 'number': 127} | 0.9569 | 0.9755 | 0.9661 | 0.9890 |
| 0.0199 | 15.0 | 285 | 0.0428 | {'precision': 0.9269102990033222, 'recall': 0.9370277078085643, 'f1': 0.9319415448851774, 'number': 1191} | {'precision': 0.9577039274924471, 'recall': 0.9378698224852071, 'f1': 0.9476831091180866, 'number': 338} | {'precision': 0.9689922480620154, 'recall': 0.984251968503937, 'f1': 0.9765625, 'number': 127} | {'precision': 0.9583333333333334, 'recall': 0.9484536082474226, 'f1': 0.9533678756476685, 'number': 97} | {'precision': 0.967741935483871, 'recall': 0.9836065573770492, 'f1': 0.975609756097561, 'number': 122} | {'precision': 0.9618320610687023, 'recall': 0.984375, 'f1': 0.9729729729729729, 'number': 128} | {'precision': 0.9809656453110492, 'recall': 0.9957587181903864, 'f1': 0.988306828811974, 'number': 2122} | {'precision': 0.9888268156424581, 'recall': 0.9943820224719101, 'f1': 0.9915966386554622, 'number': 534} | {'precision': 0.9354838709677419, 'recall': 0.9206349206349206, 'f1': 0.9279999999999999, 'number': 63} | {'precision': 0.9253731343283582, 'recall': 0.9763779527559056, 'f1': 0.9501915708812261, 'number': 127} | 0.9633 | 0.9738 | 0.9685 | 0.9898 |

### Framework versions

- Transformers 4.44.2
- Pytorch 2.4.0+cu121
- Datasets 3.0.0
- Tokenizers 0.19.1
config.json ADDED
@@ -0,0 +1,52 @@
{
  "_name_or_path": "microsoft/layoutlm-base-uncased",
  "architectures": [
    "LayoutLMForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "O",
    "1": "Invoice no",
    "2": "Order id",
    "3": "Order date",
    "4": "GST no",
    "5": "Total net",
    "6": "Total gross",
    "7": "S-name",
    "8": "B-name",
    "9": "S-adress",
    "10": "B-adress"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "B-adress": 10,
    "B-name": 8,
    "GST no": 4,
    "Invoice no": 1,
    "O": 0,
    "Order date": 3,
    "Order id": 2,
    "S-adress": 9,
    "S-name": 7,
    "Total gross": 6,
    "Total net": 5
  },
  "layer_norm_eps": 1e-12,
  "max_2d_position_embeddings": 1024,
  "max_position_embeddings": 512,
  "model_type": "layoutlm",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "output_past": true,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.44.2",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 30522
}
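
The `id2label`/`label2id` maps above define an 11-way token-classification head on top of the base encoder. The following is a hypothetical sketch of how such a head can be attached to the base checkpoint before fine-tuning; the actual training code is not part of this commit.

```python
# Hypothetical setup sketch: attach an 11-label token-classification head
# to the base checkpoint, mirroring id2label/label2id from config.json.
from transformers import LayoutLMForTokenClassification

labels = ["O", "Invoice no", "Order id", "Order date", "GST no", "Total net",
          "Total gross", "S-name", "B-name", "S-adress", "B-adress"]
id2label = dict(enumerate(labels))
label2id = {label: i for i, label in id2label.items()}

model = LayoutLMForTokenClassification.from_pretrained(
    "microsoft/layoutlm-base-uncased",
    num_labels=len(labels),
    id2label=id2label,
    label2id=label2id,
)
```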
logs/events.out.tfevents.1726329966.eea13cdcbbd5.1724.0 ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:89d6a55b3cab39293963ddfc55c43426d1163d1a5e25728d36feb17808c1f65c
size 5973
logs/events.out.tfevents.1726330291.eea13cdcbbd5.1724.1 ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:17925de4d88965c9cedea9d53599a6be60933601c6b89f9590f1db9d4dd7a68b
size 4184
logs/events.out.tfevents.1726330547.eea13cdcbbd5.4099.0 ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4896841709442e361e180e03f5aa0fa53904df11af25a4a61548e5ec1a2a7ea5
size 16280
logs/events.out.tfevents.1726332278.eea13cdcbbd5.4099.1 ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:91f99beff285d537b818b7456c98e577d634becad2ab5e3a1c68f45688e44196
size 592
model.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c7c7c203bd94c9c899c3ace1906c88adab69f66f09e1899614e0cb1eae60bd58
size 450570516
training_args.bin ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:74857a7a8f79978ef47dce5ff6087aa9027032decc38aa611660d907283b1576
size 5112