ECOFRI committed
Commit
556d97e
1 Parent(s): d04454a

Update config.json

Files changed (1): config.json (+3 -3)
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "G:\\Temp\\finetune_result\\LLAMA2-7B-CHAT_ViT-L-16-512_MOREKEYWORD_LN_PATCH_FINETUNE_ChexpertJSON_POSTTRAIN_25000_DIST",
+  "_name_or_path": "CXR-LLAVA-v2",
   "architectures": [
     "CXRLLAVAModel"
   ],
@@ -26,7 +26,7 @@
     "std": 0.3821719215686275
   },
   "llama": {
-    "_name_or_path": "/home/jovyan/llava/SW_LLAVA/LLAMA2-7B-CHAT_ViT-L-16-512_MOREKEYWORD_LN_PATCH_FINETUNE_ChexpertJSON_POSTTRAIN",
+    "_name_or_path": "CXR-LLAVA-v2",
     "add_cross_attention": false,
     "architectures": [
       "LlamaForCausalLM"
@@ -105,7 +105,7 @@
     "vocab_size": 32000
   },
   "llama_model_dtype": "bf16",
-  "llama_model_path": "/home/jovyan/llava/SW_LLAVA/LLAMA2-7B-CHAT_ViT-L-16-512_MOREKEYWORD_LN_PATCH_FINETUNE_ChexpertJSON_POSTTRAIN",
+  "llama_model_path": "CXR-LLAVA-v2",
   "mm_projector_dim": 1024,
   "mm_projector_dtype": "fp32",
   "mm_projector_path": null,