yuekun committed on
Commit db8a2ce
1 Parent(s): 39232a8

Update config.json


Rename several config parameters (e.g. img_size → image_size, embed_dim → hidden_size) and add output_a3_attentions.

Files changed (1)
  1. config.json +9 -8

config.json CHANGED
@@ -4,21 +4,22 @@
   [
     "MGPSTRModel"
   ],
-  "img_size": [32, 128],
+  "image_size": [32, 128],
   "patch_size": 4,
-  "in_chans": 3,
+  "num_channels": 3,
   "max_token_length": 27,
-  "char_num_classes": 38,
-  "bpe_num_classes": 50257,
-  "wp_num_classes": 30522,
-  "embed_dim": 768,
-  "depth": 12,
-  "num_heads": 12,
+  "num_character_labels": 38,
+  "num_bpe_labels": 50257,
+  "num_wordpiece_labels": 30522,
+  "hidden_size": 768,
+  "num_hidden_layers": 12,
+  "num_attention_heads": 12,
   "mlp_ratio": 4,
   "qkv_bias": true,
   "drop_rate": 0.0,
   "attn_drop_rate": 0.0,
   "drop_path_rate": 0.0,
+  "output_a3_attentions": false,
   "torch_dtype": "float32",
   "transformers_version": null
 }
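
For context, a minimal sketch (not part of this commit) of how the renamed keys line up with keyword arguments if the config follows the MgpstrConfig convention in Hugging Face Transformers. The use of MgpstrConfig here is an assumption, since this file lists the architecture as "MGPSTRModel", and it requires a Transformers release that includes MGP-STR.

# Minimal sketch, assuming the renamed keys map onto MgpstrConfig arguments.
from transformers import MgpstrConfig

config = MgpstrConfig(
    image_size=[32, 128],        # was "img_size"
    patch_size=4,
    num_channels=3,              # was "in_chans"
    max_token_length=27,
    num_character_labels=38,     # was "char_num_classes"
    num_bpe_labels=50257,        # was "bpe_num_classes"
    num_wordpiece_labels=30522,  # was "wp_num_classes"
    hidden_size=768,             # was "embed_dim"
    num_hidden_layers=12,        # was "depth"
    num_attention_heads=12,      # was "num_heads"
    mlp_ratio=4,
    qkv_bias=True,
    drop_rate=0.0,
    attn_drop_rate=0.0,
    drop_path_rate=0.0,
    output_a3_attentions=False,  # key added in this commit
)

# Serialize back to JSON to compare against the updated config.json.
config.to_json_file("config.json")

If the JSON keys match the config class's argument names, loading the file reproduces the same attributes without any custom remapping code.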