g-h-chen committed
Commit ce27692
1 Parent(s): 7a91fcf

upload config.json

Files changed (1)
config.json +2 -2
config.json CHANGED
@@ -6,7 +6,7 @@
   "attention_dropout": 0.0,
   "auto_map": {
     "AutoConfig": "configuration_phi.PhiConfig",
-    "AutoModelForCausalLM": "modeling_phi.PhiForCausalLM"
+    "AutoModelForCausalLM": "modeling_llava_phi.LlavaPhiForCausalLM"
   },
   "bos_token_id": null,
   "embd_pdrop": 0.0,
@@ -28,7 +28,7 @@
   "mm_use_im_start_end": false,
   "mm_vision_select_feature": "patch",
   "mm_vision_select_layer": -2,
-  "mm_vision_tower": "openai/clip-vit-large-patch14-336",
+  "mm_vision_tower": "openai/clip_vit_large_patch14_336",
   "model_type": "llava_phi",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
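
The "auto_map" change in the first hunk is what transformers consults when this checkpoint is loaded with trust_remote_code=True: instead of the stock PhiForCausalLM, the loader now imports LlavaPhiForCausalLM from the modeling_llava_phi.py file shipped in the repo. A minimal sketch of that loading path, with a hypothetical repo id (substitute the actual checkpoint path):

from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "g-h-chen/llava-phi"  # hypothetical repo id; use the real checkpoint path

# trust_remote_code=True lets transformers import the custom
# LlavaPhiForCausalLM class named in the "auto_map" entry
# edited by this commit, rather than the built-in Phi class.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)
print(type(model).__name__)  # expected: LlavaPhiForCausalLM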
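
The second hunk renames the vision tower. LLaVA-style model builders typically pass "mm_vision_tower" straight to CLIPVisionModel.from_pretrained, so the underscored name set here must resolve to a Hub repo or local directory of that name. A sketch of the usual resolution, assuming (not confirmed from this repo's code) that its builder follows the common LLaVA pattern:

from transformers import CLIPImageProcessor, CLIPVisionModel

# Value set by this commit; must exist as a Hub repo or local path.
vision_tower_name = "openai/clip_vit_large_patch14_336"

# Typical LLaVA-style construction: load the CLIP vision encoder
# and its matching image processor from the configured name.
vision_tower = CLIPVisionModel.from_pretrained(vision_tower_name)
image_processor = CLIPImageProcessor.from_pretrained(vision_tower_name)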