Bingsu committed on
Commit
46c72e0
1 Parent(s): 9b1b8cf

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +1 -1
config.json CHANGED
@@ -21,7 +21,7 @@
21
  "num_attention_heads": 12,
22
  "num_groups": 1,
23
  "num_hidden_layers": 12,
24
- "pad_token_id": 0,
25
  "torch_dtype": "float32",
26
  "transformers_version": "4.27.2",
27
  "type_vocab_size": 2,
 
21
  "num_attention_heads": 12,
22
  "num_groups": 1,
23
  "num_hidden_layers": 12,
24
+ "pad_token_id": 1,
25
  "torch_dtype": "float32",
26
  "transformers_version": "4.27.2",
27
  "type_vocab_size": 2,