vince62s committed
Commit 9a9a316
Parent: c19d6fb

Update config.json

Files changed (1):
  config.json (+7 / -7)
config.json CHANGED
@@ -1,16 +1,16 @@
 {
-  "_name_or_path": "vince62s/phi-2-psy",
+  "_name_or_path": "microsoft/phi-2",
   "architectures": [
     "PhiForCausalLM"
   ],
+  "attention_dropout": 0.0,
   "auto_map": {
-    "AutoConfig": "configuration_phi.PhiConfig",
-    "AutoModelForCausalLM": "modeling_phi.PhiForCausalLM"
+    "AutoConfig": "microsoft/phi-2--configuration_phi.PhiConfig",
+    "AutoModelForCausalLM": "microsoft/phi-2--modeling_phi.PhiForCausalLM"
   },
-  "attention_dropout": 0.0,
-  "bos_token_id": null,
+  "bos_token_id": 50256,
   "embd_pdrop": 0.0,
-  "eos_token_id": null,
+  "eos_token_id": 50297,
   "hidden_act": "gelu_new",
   "hidden_size": 2560,
   "initializer_range": 0.02,
@@ -28,7 +28,7 @@
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.37.0",
+  "transformers_version": "4.37.2",
   "use_cache": true,
   "vocab_size": 51200
 }
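
For context, the updated "auto_map" entries use the transformers "repo--module.Class" syntax, which resolves the custom PhiConfig and PhiForCausalLM code from the upstream microsoft/phi-2 repo instead of from files in this repo. Below is a minimal sketch of loading the checkpoint after this change, assuming the repo id vince62s/phi-2-psy taken from the removed "_name_or_path"; trust_remote_code=True is required so transformers is allowed to fetch and run the code referenced by "auto_map".

# Minimal loading sketch; "vince62s/phi-2-psy" is assumed to be the
# repo this commit belongs to.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "vince62s/phi-2-psy"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype="auto",      # the config declares bfloat16 weights
    trust_remote_code=True,  # needed to run the code referenced via auto_map
)

# Quick smoke test; eos_token_id 50297 from the updated config stops generation.
inputs = tokenizer("Hello,", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(outputs[0]))

Note that the commit also replaces the null "bos_token_id"/"eos_token_id" with concrete ids (50256 and 50297), so generate() can terminate on the end-of-sequence token without the caller passing it explicitly.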