Files changed (1)
  1. config.json +2 -2
config.json CHANGED
@@ -8,9 +8,9 @@
  "AutoModelForCausalLM": "modeling_phi.PhiForCausalLM"
  },
  "attention_dropout": 0.0,
- "bos_token_id": null,
+ "bos_token_id": 50256,
  "embd_pdrop": 0.0,
- "eos_token_id": null,
+ "eos_token_id": 50256,
  "hidden_act": "gelu_new",
  "hidden_size": 2560,
  "initializer_range": 0.02,