luodian committed on
Commit
721a03e
1 Parent(s): e62257f

Upload folder using huggingface_hub (#2)

- d8e83d7ddc44756d74c7e94cfe4a11d02dbbd13a046b313b3a911fada524350c (c120dc0c730da7922c83c1eeef4c9bbebb8d0eb9)
- f44c6286708fe6e1416aad8a144aeb4098008ca98f73f959ff26d8e02d2eccb2 (68dd565b3afb74aa7500c137ce64e9a36516bc4f)
- 95390e40301994bcb2122e146154cf7ca85403eb3e730c791349133f8c06847d (737d33311215ea92a4bd045a3f7af7ba3f7c0bd7)
- 9f7215ccd7cb4c185eb4fac86e4d56da03631027e58e2bf4ed1b9ebafab18c8e (af9ea461e7932c5f8ab7c5627045ad4228a232b6)

config.json CHANGED
@@ -9,7 +9,7 @@
   "eos_token_id": 2,
   "hidden_act": "silu",
   "hidden_size": 4096,
-  "image_aspect_ratio": "pad",
+  "image_aspect_ratio": "resize",
   "image_crop_resolution": null,
   "image_grid_pinpoints": null,
   "image_split_resolution": null,
@@ -28,7 +28,7 @@
   "mm_vision_select_layer": -2,
   "mm_vision_tower": "openai/clip-vit-large-patch14-336",
   "mm_vision_tower_lr": 2e-06,
-  "model_type": "llama",
+  "model_type": "llava_llama",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 32,
@@ -41,9 +41,10 @@
   "tokenizer_model_max_length": 4096,
   "tokenizer_padding_side": "right",
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.39.3",
+  "transformers_version": "4.40.0.dev0",
   "use_cache": true,
   "use_mm_proj": true,
   "vision_tower_pretrained": null,
-  "vocab_size": 32000
+  "vocab_size": 32000,
+  "s2": true
 }
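The fields touched above can be checked locally with a minimal sketch; it assumes the updated config.json sits in the current directory (the path is illustrative) and uses only the standard-library json module.

import json

# Load the updated config and confirm the fields changed in this commit.
# The path is an assumption; adjust to wherever the repo is checked out.
with open("config.json") as f:
    cfg = json.load(f)

assert cfg["model_type"] == "llava_llama"      # was "llama"
assert cfg["image_aspect_ratio"] == "resize"   # was "pad"
assert cfg.get("s2") is True                   # newly added flag
print(cfg["transformers_version"])             # "4.40.0.dev0"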
generation_config.json CHANGED
@@ -1,9 +1,10 @@
 {
+  "attn_implementation": "flash_attention_2",
   "bos_token_id": 1,
   "eos_token_id": 2,
   "max_length": 4096,
   "pad_token_id": 0,
   "temperature": 0.9,
   "top_p": 0.6,
-  "transformers_version": "4.39.3"
+  "transformers_version": "4.40.0.dev0"
 }
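The new generation defaults can likewise be read straight from the file. The sketch below assumes a local checkout and uses only the json module; loading via transformers.GenerationConfig.from_pretrained would also work, but the plain file read keeps the example dependency-free.

import json

# Inspect the generation defaults shipped with this commit.
with open("generation_config.json") as f:
    gen = json.load(f)

# temperature 0.9 with top_p 0.6 only take effect when sampling is enabled
# at call time, e.g. model.generate(..., do_sample=True).
print(gen["temperature"], gen["top_p"], gen["max_length"])
print(gen.get("attn_implementation"))  # "flash_attention_2", newly added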
model-00001-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:551ee1e17eebc3e3f1451f4bcbf03a1a39af4a0af7467d91a056e29281abb75c
+oid sha256:574feed49800a1dae12a2d7715ab5ed1e6e74b199869cf0b8fdd97ea9bc21b41
 size 4938985352
model-00002-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3353bcf743c899905852253c0b13fb0ca359c3cddcd0d55c4f85d0b4bcb88a1e
+oid sha256:edf1d995a190f9b907dba35547a17a408090e7ee12d6ae8880f71e3ccb018ee1
 size 4947390880
model-00003-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a1587c0c0508e2c74d29d814562437fb61f29d0432c3b74fd0aed5ef286bb19b
+oid sha256:b79d0a625bd372c8d1711df0ae0081ae081acd82a66784864b07b9435f7313e4
 size 4256302160
trainer_state.json CHANGED
The diff for this file is too large to render. See raw diff
 
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:990622a241c232a2aef2e03d149378b3536e6dbc29695b194293b69810dad045
-size 7160
+oid sha256:5c98a00b5dd88431b27304126692f44fcac408310ecc11cea928cd126156b36f
+size 7544
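training_args.bin is the pickled TrainingArguments object the Hugging Face Trainer writes with torch.save, so the updated file can be inspected locally as sketched below; unpickling requires a compatible transformers install, and weights_only=False is needed on recent torch versions.

import torch

# training_args.bin is written by the HF Trainer via torch.save; loading it
# unpickles a TrainingArguments object, so transformers must be importable.
args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)  # e.g. TrainingArguments
print(args.learning_rate, args.per_device_train_batch_size)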