dwitidibyajyoti committed on
Commit
1e6aa34
1 Parent(s): ac45d2e

Upload LayoutLMForTokenClassification

Files changed (2)
  1. config.json +40 -52
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -1,70 +1,58 @@
 {
-  "_name_or_path": "microsoft/layoutlmv3-base",
+  "_name_or_path": "microsoft/layoutlm-base-uncased",
   "architectures": [
-    "LayoutLMv3ForTokenClassification"
+    "LayoutLMForTokenClassification"
   ],
   "attention_probs_dropout_prob": 0.1,
-  "bos_token_id": 0,
-  "classifier_dropout": null,
-  "coordinate_size": 128,
-  "eos_token_id": 2,
-  "has_relative_attention_bias": true,
-  "has_spatial_attention_bias": true,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
   "id2label": {
-    "0": "S-IGNORE",
-    "1": "I-IGNORE",
-    "2": "E-TABLE",
-    "3": "I-TABLE",
-    "4": "E-VALUE",
-    "5": "B-VALUE",
-    "6": "B-IGNORE",
-    "7": "S-VALUE",
-    "8": "I-VALUE",
-    "9": "E-IGNORE",
-    "10": "B-TABLE",
-    "11": "S-KEY",
-    "12": "O"
+    "0": "S-VALUE",
+    "1": "I-VALUE",
+    "2": "E-IGNORE",
+    "3": "S-KEY",
+    "4": "E-COLUMN",
+    "5": "B-IGNORE",
+    "6": "S-COLUMN",
+    "7": "I-COLUMN",
+    "8": "O",
+    "9": "E-VALUE",
+    "10": "B-COLUMN",
+    "11": "S-IGNORE",
+    "12": "I-IGNORE",
+    "13": "B-VALUE"
   },
   "initializer_range": 0.02,
-  "input_size": 224,
   "intermediate_size": 3072,
   "label2id": {
-    "B-IGNORE": 6,
-    "B-TABLE": 10,
-    "B-VALUE": 5,
-    "E-IGNORE": 9,
-    "E-TABLE": 2,
-    "E-VALUE": 4,
-    "I-IGNORE": 1,
-    "I-TABLE": 3,
-    "I-VALUE": 8,
-    "O": 12,
-    "S-IGNORE": 0,
-    "S-KEY": 11,
-    "S-VALUE": 7
+    "B-COLUMN": 10,
+    "B-IGNORE": 5,
+    "B-VALUE": 13,
+    "E-COLUMN": 4,
+    "E-IGNORE": 2,
+    "E-VALUE": 9,
+    "I-COLUMN": 7,
+    "I-IGNORE": 12,
+    "I-VALUE": 1,
+    "O": 8,
+    "S-COLUMN": 6,
+    "S-IGNORE": 11,
+    "S-KEY": 3,
+    "S-VALUE": 0
   },
-  "layer_norm_eps": 1e-05,
+  "layer_norm_eps": 1e-12,
   "max_2d_position_embeddings": 1024,
-  "max_position_embeddings": 514,
-  "max_rel_2d_pos": 256,
-  "max_rel_pos": 128,
-  "model_type": "layoutlmv3",
+  "max_position_embeddings": 512,
+  "model_type": "layoutlm",
   "num_attention_heads": 12,
-  "num_channels": 3,
   "num_hidden_layers": 12,
-  "pad_token_id": 1,
-  "patch_size": 16,
-  "rel_2d_pos_bins": 64,
-  "rel_pos_bins": 32,
-  "second_input_size": 112,
-  "shape_size": 128,
-  "text_embed": true,
+  "output_past": true,
+  "pad_token_id": 0,
+  "position_embedding_type": "absolute",
   "torch_dtype": "float32",
-  "transformers_version": "4.28.0",
-  "type_vocab_size": 1,
-  "visual_embed": true,
-  "vocab_size": 50265
+  "transformers_version": "4.32.1",
+  "type_vocab_size": 2,
+  "use_cache": true,
+  "vocab_size": 30522
 }
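The updated config switches the base checkpoint from microsoft/layoutlmv3-base to microsoft/layoutlm-base-uncased and defines a 14-class BIOES-style label set over VALUE, COLUMN, IGNORE, KEY and O. Because LayoutLM (v1) takes no image input, the visual fields of the v3 config (input_size, patch_size, num_channels, visual_embed) disappear. A minimal sketch of loading this checkpoint for token classification with the transformers LayoutLM classes is shown below; the repository id, example words, and bounding boxes are illustrative assumptions, not part of this commit.

```python
# Hypothetical usage sketch: load the uploaded checkpoint and run inference.
import torch
from transformers import LayoutLMForTokenClassification, LayoutLMTokenizerFast

repo_id = "dwitidibyajyoti/layoutlm-token-classification"  # placeholder repo id (assumption)
tokenizer = LayoutLMTokenizerFast.from_pretrained("microsoft/layoutlm-base-uncased")
model = LayoutLMForTokenClassification.from_pretrained(repo_id)
model.eval()

# Example words and boxes (normalized to 0-1000, as LayoutLM expects) -- made up for illustration.
words = ["Invoice", "Total", "42.00"]
boxes = [[30, 40, 120, 60], [30, 80, 90, 100], [100, 80, 160, 100]]

encoding = tokenizer(words, is_split_into_words=True, return_tensors="pt")
# LayoutLM needs one bounding box per token; repeat each word's box over its sub-tokens.
bbox = [[0, 0, 0, 0] if w is None else boxes[w] for w in encoding.word_ids(0)]
encoding["bbox"] = torch.tensor([bbox])

with torch.no_grad():
    logits = model(**encoding).logits  # shape (1, seq_len, 14)

# Map predicted class ids back through the id2label table from config.json above.
predictions = [model.config.id2label[i] for i in logits.argmax(-1)[0].tolist()]
print(list(zip(tokenizer.convert_ids_to_tokens(encoding["input_ids"][0]), predictions)))
```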
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:634a90ea5f78be5d2ce1ff24a617f489f97387111fbbe824acf620c5a66083ce
-size 503786225
+oid sha256:2352db3198e37db8a0fdb5b48c3298de173a728a360eb75bd091a690703b5ddc
+size 450625473
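pytorch_model.bin is tracked with Git LFS, so the diff only updates the pointer's sha256 and byte size; the weights file shrinks from roughly 504 MB to 451 MB with the move to the smaller LayoutLM base model. A small sketch, assuming the file has already been downloaded locally, for checking a copy against the new pointer:

```python
# Sketch: verify a locally downloaded pytorch_model.bin against the LFS pointer above.
# The local path is an assumption; adjust it to wherever the file was saved.
import hashlib
import os

path = "pytorch_model.bin"
expected_sha256 = "2352db3198e37db8a0fdb5b48c3298de173a728a360eb75bd091a690703b5ddc"
expected_size = 450625473

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        digest.update(chunk)

print("size ok:  ", os.path.getsize(path) == expected_size)
print("sha256 ok:", digest.hexdigest() == expected_sha256)
```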