Change the BOS token id to 2, since the BOS token is equal to the EOS token for OPT. See: https://github.com/huggingface/transformers/issues/17431

#1
Files changed (1)
  1. config.json +1 -1
config.json CHANGED
@@ -7,7 +7,7 @@
   ],
   "attention_dropout": 0.0,
   "attn_pdrop": 0.1,
- "bos_token_id": 50256,
+ "bos_token_id": 2,
   "classifier_dropout": 0.0,
   "d_model": 1024,
   "decoder_attention_heads": 16,