Josephgflowers committed · verified · Commit 85bdff7 · 1 Parent(s): b864606

Update config.json

Files changed (1): config.json (+2 -2)
config.json CHANGED
@@ -6,8 +6,8 @@
   "attention_bias": false,
   "attention_dropout": 0.0,
   "auto_map": {
-    "AutoConfig": "microsoft/Phi-3.5-mini-instruct--configuration_phi3.Phi3Config",
-    "AutoModelForCausalLM": "microsoft/Phi-3.5-mini-instruct--modeling_phi3.Phi3ForCausalLM"
+    "AutoConfig": "configuration_phi3.Phi3Config",
+    "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
   },
   "bos_token_id": 1,
   "embd_pdrop": 0.0,