ECOFRI committed on
Commit
688c744
1 Parent(s): 80f8630

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +3 -3
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "G:\\Temp\\finetune_result\\LLAMA2-7B-CHAT_ViT-L-16-512_MOREKEYWORD_LN_PATCH_FINETUNE_ChexpertJSON_POSTTRAIN_25000_DIST",
3
  "architectures": [
4
  "CXRLLAVAModel"
5
  ],
@@ -25,7 +25,7 @@
25
  "std": 0.3821719215686275
26
  },
27
  "llama": {
28
- "_name_or_path": "/home/jovyan/llava/SW_LLAVA/LLAMA2-7B-CHAT_ViT-L-16-512_MOREKEYWORD_LN_PATCH_FINETUNE_ChexpertJSON_POSTTRAIN",
29
  "add_cross_attention": false,
30
  "architectures": [
31
  "LlamaForCausalLM"
@@ -104,7 +104,7 @@
104
  "vocab_size": 32000
105
  },
106
  "llama_model_dtype": "bf16",
107
- "llama_model_path": "/home/jovyan/llava/SW_LLAVA/LLAMA2-7B-CHAT_ViT-L-16-512_MOREKEYWORD_LN_PATCH_FINETUNE_ChexpertJSON_POSTTRAIN",
108
  "mm_projector_dim": 1024,
109
  "mm_projector_dtype": "fp32",
110
  "mm_projector_path": null,
 
1
  {
2
+ "_name_or_path": "CXR-LLAVA-v2",
3
  "architectures": [
4
  "CXRLLAVAModel"
5
  ],
 
25
  "std": 0.3821719215686275
26
  },
27
  "llama": {
28
+ "_name_or_path": "CXR-LLAVA-v2",
29
  "add_cross_attention": false,
30
  "architectures": [
31
  "LlamaForCausalLM"
 
104
  "vocab_size": 32000
105
  },
106
  "llama_model_dtype": "bf16",
107
+ "llama_model_path": "CXR-LLAVA-v2",
108
  "mm_projector_dim": 1024,
109
  "mm_projector_dtype": "fp32",
110
  "mm_projector_path": null,