nielsr (HF staff) committed
Commit e8cecaa
1 parent: e8f88ea

Upload LayoutLMv2ForTokenClassification

Files changed (2)
  1. config.json +153 -0
  2. pytorch_model.bin +3 -0
config.json ADDED
@@ -0,0 +1,153 @@
+ {
+   "_name_or_path": "microsoft/layoutxlm-base",
+   "architectures": [
+     "LayoutLMv2ForTokenClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "bos_token_id": 0,
+   "convert_sync_batchnorm": true,
+   "coordinate_size": 128,
+   "detectron2_config_args": {
+     "MODEL.ANCHOR_GENERATOR.SIZES": [
+       [
+         32
+       ],
+       [
+         64
+       ],
+       [
+         128
+       ],
+       [
+         256
+       ],
+       [
+         512
+       ]
+     ],
+     "MODEL.BACKBONE.NAME": "build_resnet_fpn_backbone",
+     "MODEL.FPN.IN_FEATURES": [
+       "res2",
+       "res3",
+       "res4",
+       "res5"
+     ],
+     "MODEL.MASK_ON": true,
+     "MODEL.PIXEL_STD": [
+       57.375,
+       57.12,
+       58.395
+     ],
+     "MODEL.POST_NMS_TOPK_TEST": 1000,
+     "MODEL.RESNETS.ASPECT_RATIOS": [
+       [
+         0.5,
+         1.0,
+         2.0
+       ]
+     ],
+     "MODEL.RESNETS.DEPTH": 101,
+     "MODEL.RESNETS.NUM_GROUPS": 32,
+     "MODEL.RESNETS.OUT_FEATURES": [
+       "res2",
+       "res3",
+       "res4",
+       "res5"
+     ],
+     "MODEL.RESNETS.SIZES": [
+       [
+         32
+       ],
+       [
+         64
+       ],
+       [
+         128
+       ],
+       [
+         256
+       ],
+       [
+         512
+       ]
+     ],
+     "MODEL.RESNETS.STRIDE_IN_1X1": false,
+     "MODEL.RESNETS.WIDTH_PER_GROUP": 8,
+     "MODEL.ROI_BOX_HEAD.NAME": "FastRCNNConvFCHead",
+     "MODEL.ROI_BOX_HEAD.NUM_FC": 2,
+     "MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION": 14,
+     "MODEL.ROI_HEADS.IN_FEATURES": [
+       "p2",
+       "p3",
+       "p4",
+       "p5"
+     ],
+     "MODEL.ROI_HEADS.NAME": "StandardROIHeads",
+     "MODEL.ROI_HEADS.NUM_CLASSES": 5,
+     "MODEL.ROI_MASK_HEAD.NAME": "MaskRCNNConvUpsampleHead",
+     "MODEL.ROI_MASK_HEAD.NUM_CONV": 4,
+     "MODEL.ROI_MASK_HEAD.POOLER_RESOLUTION": 7,
+     "MODEL.RPN.IN_FEATURES": [
+       "p2",
+       "p3",
+       "p4",
+       "p5",
+       "p6"
+     ],
+     "MODEL.RPN.POST_NMS_TOPK_TRAIN": 1000,
+     "MODEL.RPN.PRE_NMS_TOPK_TEST": 1000,
+     "MODEL.RPN.PRE_NMS_TOPK_TRAIN": 2000
+   },
+   "eos_token_id": 2,
+   "fast_qkv": false,
+   "gradient_checkpointing": false,
+   "has_relative_attention_bias": false,
+   "has_spatial_attention_bias": false,
+   "has_visual_segment_embedding": true,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 768,
+   "id2label": {
+     "0": "O",
+     "1": "B-QUESTION",
+     "2": "B-ANSWER",
+     "3": "B-HEADER",
+     "4": "I-ANSWER",
+     "5": "I-QUESTION",
+     "6": "I-HEADER"
+   },
+   "image_feature_pool_shape": [
+     7,
+     7,
+     256
+   ],
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "label2id": {
+     "B-ANSWER": 2,
+     "B-HEADER": 3,
+     "B-QUESTION": 1,
+     "I-ANSWER": 4,
+     "I-HEADER": 6,
+     "I-QUESTION": 5,
+     "O": 0
+   },
+   "layer_norm_eps": 1e-05,
+   "max_2d_position_embeddings": 1024,
+   "max_position_embeddings": 514,
+   "max_rel_2d_pos": 256,
+   "max_rel_pos": 128,
+   "model_type": "layoutlmv2",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "output_past": true,
+   "pad_token_id": 1,
+   "rel_2d_pos_bins": 64,
+   "rel_pos_bins": 32,
+   "shape_size": 128,
+   "tokenizer_class": "LayoutXLMTokenizer",
+   "torch_dtype": "float32",
+   "transformers_version": "4.22.1",
+   "type_vocab_size": 1,
+   "vocab_size": 250002
+ }
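
For context, and not part of the commit itself: a checkpoint carrying this config is loaded through the class named in "architectures". A minimal sketch, assuming a placeholder repository id and that detectron2 (needed by LayoutLMv2's visual backbone) is installed:

# Minimal sketch; "your-username/layoutxlm-finetuned" is a placeholder repo id.
from transformers import LayoutLMv2ForTokenClassification

model = LayoutLMv2ForTokenClassification.from_pretrained("your-username/layoutxlm-finetuned")

# The BIO-style label set from id2label above (keys become ints after loading).
print(model.config.id2label)
# {0: 'O', 1: 'B-QUESTION', 2: 'B-ANSWER', 3: 'B-HEADER',
#  4: 'I-ANSWER', 5: 'I-QUESTION', 6: 'I-HEADER'}
print(model.config.num_labels)  # 7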
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:51d8bd72192fbe8fb3d84c7ffb0d08049021529d76b6049d8d4d28361ac49fe7
+ size 1476515623
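
For reference, also not part of the commit: pytorch_model.bin is stored via Git LFS, so the three lines above are the pointer file rather than the weights themselves. A minimal sketch of checking a locally downloaded copy against the recorded sha256 and size (local path is an assumption):

import hashlib
import os

EXPECTED_SHA256 = "51d8bd72192fbe8fb3d84c7ffb0d08049021529d76b6049d8d4d28361ac49fe7"
EXPECTED_SIZE = 1476515623  # bytes, from the LFS pointer above

path = "pytorch_model.bin"  # assumed local download location
digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        digest.update(chunk)

print(os.path.getsize(path) == EXPECTED_SIZE)
print(digest.hexdigest() == EXPECTED_SHA256)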