Xenova HF staff committed on
Commit
48d7724
1 Parent(s): a66a07b

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "/scratch/azureml/cr/j/c114671e623f4fd783db31f5a24e9c46/cap/data-capability/wd/INPUT_model_path",
3
  "architectures": [
4
  "PhiLongRoPEForCausalLM"
5
  ],
@@ -15,7 +15,7 @@
15
  "initializer_range": 0.02,
16
  "intermediate_size": 8192,
17
  "max_position_embeddings": 131072,
18
- "model_type": "mistral",
19
  "xmodel_type": "phi_longrope",
20
  "num_attention_heads": 32,
21
  "num_hidden_layers": 32,
 
1
  {
2
+ "_name_or_path": "microsoft/Phi-3-mini-128k-instruct-onnx",
3
  "architectures": [
4
  "PhiLongRoPEForCausalLM"
5
  ],
 
15
  "initializer_range": 0.02,
16
  "intermediate_size": 8192,
17
  "max_position_embeddings": 131072,
18
+ "model_type": "phi3",
19
  "xmodel_type": "phi_longrope",
20
  "num_attention_heads": 32,
21
  "num_hidden_layers": 32,