Sebabrata committed on
Commit 6dba50c
1 Parent(s): 25a261e

Training in progress, epoch 1

.gitignore ADDED
@@ -0,0 +1 @@
+ checkpoint-*/
config.json ADDED
@@ -0,0 +1,190 @@
+ {
+ "_name_or_path": "microsoft/layoutlmv2-large-uncased",
+ "architectures": [
+ "LayoutLMv2ForTokenClassification"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "convert_sync_batchnorm": true,
+ "coordinate_size": 171,
+ "detectron2_config_args": {
+ "MODEL.ANCHOR_GENERATOR.SIZES": [
+ [
+ 32
+ ],
+ [
+ 64
+ ],
+ [
+ 128
+ ],
+ [
+ 256
+ ],
+ [
+ 512
+ ]
+ ],
+ "MODEL.BACKBONE.NAME": "build_resnet_fpn_backbone",
+ "MODEL.FPN.IN_FEATURES": [
+ "res2",
+ "res3",
+ "res4",
+ "res5"
+ ],
+ "MODEL.MASK_ON": true,
+ "MODEL.PIXEL_STD": [
+ 57.375,
+ 57.12,
+ 58.395
+ ],
+ "MODEL.POST_NMS_TOPK_TEST": 1000,
+ "MODEL.RESNETS.ASPECT_RATIOS": [
+ [
+ 0.5,
+ 1.0,
+ 2.0
+ ]
+ ],
+ "MODEL.RESNETS.DEPTH": 101,
+ "MODEL.RESNETS.NUM_GROUPS": 32,
+ "MODEL.RESNETS.OUT_FEATURES": [
+ "res2",
+ "res3",
+ "res4",
+ "res5"
+ ],
+ "MODEL.RESNETS.SIZES": [
+ [
+ 32
+ ],
+ [
+ 64
+ ],
+ [
+ 128
+ ],
+ [
+ 256
+ ],
+ [
+ 512
+ ]
+ ],
+ "MODEL.RESNETS.STRIDE_IN_1X1": false,
+ "MODEL.RESNETS.WIDTH_PER_GROUP": 8,
+ "MODEL.ROI_BOX_HEAD.NAME": "FastRCNNConvFCHead",
+ "MODEL.ROI_BOX_HEAD.NUM_FC": 2,
+ "MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION": 14,
+ "MODEL.ROI_HEADS.IN_FEATURES": [
+ "p2",
+ "p3",
+ "p4",
+ "p5"
+ ],
+ "MODEL.ROI_HEADS.NAME": "StandardROIHeads",
+ "MODEL.ROI_HEADS.NUM_CLASSES": 5,
+ "MODEL.ROI_MASK_HEAD.NAME": "MaskRCNNConvUpsampleHead",
+ "MODEL.ROI_MASK_HEAD.NUM_CONV": 4,
+ "MODEL.ROI_MASK_HEAD.POOLER_RESOLUTION": 7,
+ "MODEL.RPN.IN_FEATURES": [
+ "p2",
+ "p3",
+ "p4",
+ "p5",
+ "p6"
+ ],
+ "MODEL.RPN.POST_NMS_TOPK_TRAIN": 1000,
+ "MODEL.RPN.PRE_NMS_TOPK_TEST": 1000,
+ "MODEL.RPN.PRE_NMS_TOPK_TRAIN": 2000
+ },
+ "fast_qkv": false,
+ "gradient_checkpointing": false,
+ "has_relative_attention_bias": true,
+ "has_spatial_attention_bias": true,
+ "has_visual_segment_embedding": false,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 1024,
+ "id2label": {
+ "0": "E-PURCHASE_TIME",
+ "1": "B-SUB_TOTAL",
+ "2": "I-SUPPLIER_NAME",
+ "3": "S-SUB_TOTAL",
+ "4": "O",
+ "5": "E-SUB_TOTAL",
+ "6": "S-SUPPLIER_NAME",
+ "7": "B-PURCHASE_TIME",
+ "8": "I-RECEIPT_DATE",
+ "9": "E-RECEIPT_DATE",
+ "10": "I-TOTAL",
+ "11": "S-TOTAL",
+ "12": "S-TIP_AMOUNT",
+ "13": "I-PURCHASE_TIME",
+ "14": "B-RECEIPT_DATE",
+ "15": "S-RECEIPT_DATE",
+ "16": "E-TOTAL",
+ "17": "B-SUPPLIER_NAME",
+ "18": "I-SUB_TOTAL",
+ "19": "B-SUPPLIER_ADDRESS",
+ "20": "I-SUPPLIER_ADDRESS",
+ "21": "S-PURCHASE_TIME",
+ "22": "S-SUPPLIER_ADDRESS",
+ "23": "S-TOTAL_TAX_AMOUNT",
+ "24": "B-TOTAL",
+ "25": "E-SUPPLIER_ADDRESS",
+ "26": "E-SUPPLIER_NAME"
+ },
+ "image_feature_pool_shape": [
+ 7,
+ 7,
+ 256
+ ],
+ "initializer_range": 0.02,
+ "intermediate_size": 4096,
+ "label2id": {
+ "B-PURCHASE_TIME": 7,
+ "B-RECEIPT_DATE": 14,
+ "B-SUB_TOTAL": 1,
+ "B-SUPPLIER_ADDRESS": 19,
+ "B-SUPPLIER_NAME": 17,
+ "B-TOTAL": 24,
+ "E-PURCHASE_TIME": 0,
+ "E-RECEIPT_DATE": 9,
+ "E-SUB_TOTAL": 5,
+ "E-SUPPLIER_ADDRESS": 25,
+ "E-SUPPLIER_NAME": 26,
+ "E-TOTAL": 16,
+ "I-PURCHASE_TIME": 13,
+ "I-RECEIPT_DATE": 8,
+ "I-SUB_TOTAL": 18,
+ "I-SUPPLIER_ADDRESS": 20,
+ "I-SUPPLIER_NAME": 2,
+ "I-TOTAL": 10,
+ "O": 4,
+ "S-PURCHASE_TIME": 21,
+ "S-RECEIPT_DATE": 15,
+ "S-SUB_TOTAL": 3,
+ "S-SUPPLIER_ADDRESS": 22,
+ "S-SUPPLIER_NAME": 6,
+ "S-TIP_AMOUNT": 12,
+ "S-TOTAL": 11,
+ "S-TOTAL_TAX_AMOUNT": 23
+ },
+ "layer_norm_eps": 1e-12,
+ "max_2d_position_embeddings": 1024,
+ "max_position_embeddings": 512,
+ "max_rel_2d_pos": 256,
+ "max_rel_pos": 128,
+ "model_type": "layoutlmv2",
+ "num_attention_heads": 16,
+ "num_hidden_layers": 24,
+ "output_past": true,
+ "pad_token_id": 0,
+ "rel_2d_pos_bins": 64,
+ "rel_pos_bins": 32,
+ "shape_size": 170,
+ "torch_dtype": "float32",
+ "transformers_version": "4.25.0.dev0",
+ "type_vocab_size": 2,
+ "vocab_size": 30522
+ }
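The config above describes a LayoutLMv2ForTokenClassification checkpoint fine-tuned from microsoft/layoutlmv2-large-uncased, with a 27-tag BIOES labeling scheme for receipt fields (supplier name/address, receipt date, purchase time, sub-total, total, tax and tip amounts). A minimal inference sketch with transformers follows; the repo id and image path are assumptions for illustration, not stated anywhere in this diff, and the processor is loaded from the base model because this commit adds tokenizer files but no preprocessor_config.json.

# Minimal sketch (assumptions: placeholder repo id, local receipt image, detectron2 installed).
import torch
from PIL import Image
from transformers import LayoutLMv2Processor, LayoutLMv2ForTokenClassification

repo_id = "Sebabrata/layoutlmv2-receipts"  # hypothetical id for this repository

# OCR + image preprocessing come from the base model's processor.
processor = LayoutLMv2Processor.from_pretrained("microsoft/layoutlmv2-large-uncased")
model = LayoutLMv2ForTokenClassification.from_pretrained(repo_id)

image = Image.open("receipt.png").convert("RGB")   # hypothetical input image
encoding = processor(image, return_tensors="pt", truncation=True)
with torch.no_grad():
    logits = model(**encoding).logits              # shape (1, seq_len, 27)
predictions = logits.argmax(-1).squeeze().tolist()
labels = [model.config.id2label[p] for p in predictions]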
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c3aeda59540562496cca3dfd4874d6d3be77fdba02aeb76bfeb3e74a702ed660
+ size 1706033123
runs/Nov24_05-07-38_instance-gp-s/1669266468.876892/events.out.tfevents.1669266468.instance-gp-s.2451.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:64014fe2a3dbc3dbb06e4445eed10ebd2be02937a2493b0e48e5947ba151b622
+ size 5634
runs/Nov24_05-07-38_instance-gp-s/events.out.tfevents.1669266468.instance-gp-s.2451.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:04074cfa81133d508f2ec36aa16c793b16db6bc04173eb97e8e8576a61596674
+ size 9964
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "cls_token": "[CLS]",
+ "mask_token": "[MASK]",
+ "pad_token": "[PAD]",
+ "sep_token": "[SEP]",
+ "unk_token": "[UNK]"
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,37 @@
+ {
+ "additional_special_tokens": null,
+ "cls_token": "[CLS]",
+ "cls_token_box": [
+ 0,
+ 0,
+ 0,
+ 0
+ ],
+ "do_basic_tokenize": true,
+ "do_lower_case": true,
+ "mask_token": "[MASK]",
+ "model_max_length": 512,
+ "name_or_path": "microsoft/layoutlmv2-large-uncased",
+ "never_split": null,
+ "only_label_first_subword": true,
+ "pad_token": "[PAD]",
+ "pad_token_box": [
+ 0,
+ 0,
+ 0,
+ 0
+ ],
+ "pad_token_label": -100,
+ "sep_token": "[SEP]",
+ "sep_token_box": [
+ 1000,
+ 1000,
+ 1000,
+ 1000
+ ],
+ "special_tokens_map_file": null,
+ "strip_accents": null,
+ "tokenize_chinese_chars": true,
+ "tokenizer_class": "LayoutLMv2Tokenizer",
+ "unk_token": "[UNK]"
+ }
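The tokenizer config above fixes the special-token bounding boxes ([0, 0, 0, 0] for [CLS]/[PAD], [1000, 1000, 1000, 1000] for [SEP]) and uses -100 as the label for padding and non-first subword pieces, so the token-classification loss ignores them. A small sketch of how these defaults get applied; the example words, boxes and label ids are made up for illustration.

# Sketch only: words, boxes and word_labels are hypothetical example data.
from transformers import LayoutLMv2TokenizerFast

tok = LayoutLMv2TokenizerFast.from_pretrained("microsoft/layoutlmv2-large-uncased")
words = ["Total", "12.50"]
boxes = [[100, 500, 180, 520], [200, 500, 260, 520]]  # already normalized to 0-1000
word_labels = [4, 11]                                  # "O" and "S-TOTAL" per label2id

enc = tok(words, boxes=boxes, word_labels=word_labels,
          padding="max_length", truncation=True, return_tensors="pt")
# enc["bbox"]: [CLS]/[PAD] positions get cls_token_box/pad_token_box ([0, 0, 0, 0]),
# [SEP] gets sep_token_box ([1000, 1000, 1000, 1000]).
# enc["labels"]: padded and non-first-subword positions are set to pad_token_label (-100),
# since only_label_first_subword is true.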
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:882095dc73404db79ab42a9fb4f63e50a8b7ccad8316bd911aa7ab1db69dca7f
+ size 3503
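training_args.bin is the TrainingArguments object the Trainer serializes with torch.save. A sketch for inspecting the training hyperparameters (transformers must be importable for unpickling; newer PyTorch versions may also require weights_only=False):

# Sketch: inspect the serialized TrainingArguments from this checkpoint.
import torch

args = torch.load("training_args.bin")
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)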
vocab.txt ADDED
The diff for this file is too large to render. See raw diff