fix eos_token and bos_token in config (#1)
by mayank-mishra - opened
config.json CHANGED (+2 -2)

@@ -6,9 +6,9 @@
 "attention_softmax_in_fp32": true,
 "multi_query": true,
 "attn_pdrop": 0.1,
-"bos_token_id":
+"bos_token_id": 49152,
 "embd_pdrop": 0.1,
-"eos_token_id":
+"eos_token_id": 49152,
 "initializer_range": 0.02,
 "layer_norm_epsilon": 1e-05,
 "model_type": "gpt_bigcode",
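Not part of the PR itself, but a quick way to sanity-check the patched file locally is to load it and compare both ids. A minimal sketch in Python; the expected value 49152 comes straight from the diff above, while the file path and the print message are assumptions for illustration:

# Minimal local check of the patched config.json (sketch, not part of the PR).
# Expected id 49152 is taken from the diff above; the path "config.json" is assumed.
import json

with open("config.json") as f:
    cfg = json.load(f)

# After the fix, both special-token ids should be present and equal to 49152.
assert cfg.get("bos_token_id") == 49152, cfg.get("bos_token_id")
assert cfg.get("eos_token_id") == 49152, cfg.get("eos_token_id")
print("bos_token_id and eos_token_id are set:", cfg["eos_token_id"])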