Commit: Update config.json
File changed: config.json (+3 −3)
Note: in the three removed lines below, the original string values were truncated
during page extraction — only the opening quote is visible in the source; they are
shown as "…" here. The added values are reproduced exactly as captured.

@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "…",
+  "_name_or_path": "CXR-LLAVA-v2",
   "architectures": [
     "CXRLLAVAModel"
   ],
@@ -26,7 +26,7 @@
     "std": 0.3821719215686275
   },
   "llama": {
-    "_name_or_path": "…",
+    "_name_or_path": "CXR-LLAVA-v2",
     "add_cross_attention": false,
     "architectures": [
       "LlamaForCausalLM"
@@ -105,7 +105,7 @@
     "vocab_size": 32000
   },
   "llama_model_dtype": "bf16",
-  "llama_model_path": "…",
+  "llama_model_path": "CXR-LLAVA-v2",
   "mm_projector_dim": 1024,
   "mm_projector_dtype": "fp32",
   "mm_projector_path": null,