Change BOS token id from 0 to 2, since the BOS token is the same as the EOS token for OPT. See: https://github.com/huggingface/transformers/issues/17431

#1
Files changed (1)
  1. config.json +1 -1
config.json CHANGED
@@ -5,7 +5,7 @@
     "OPTForCausalLM"
   ],
   "attention_dropout": 0.0,
-  "bos_token_id": 0,
+  "bos_token_id": 2,
   "hidden_size": 2048,
   "do_layer_norm_before": true,
   "dropout": 0.1,