Change BOS token id from 0 to 2, since the BOS token is the same as the EOS token for OPT. See: https://github.com/huggingface/transformers/issues/17431

#1
Files changed (1)
  1. config.json +1 -1
config.json CHANGED
@@ -6,7 +6,7 @@
  "OPTForCausalLM"
  ],
  "attention_dropout": 0.0,
- "bos_token_id": 0,
+ "bos_token_id": 2,
  "do_layer_norm_before": false,
  "dropout": 0.1,
  "eos_token_id": 2,