rippertnt committed
Commit b2505fa
1 Parent(s): 6c30d78

Upload 9 files

config.json CHANGED
@@ -1,7 +1,7 @@
 {
-  "_name_or_path": "/home/circulus/.cache/huggingface/hub/models--llava-hf--llava-v1.6-mistral-7b-hf/snapshots/c2e74c6cfac23b6d3224bfb489d46806576a98ae",
+  "_name_or_path": "../../circulus-lvm-7b",
   "architectures": [
-    "LlavaNextForConditionalGeneration"
+    "LlavaForConditionalGeneration"
   ],
   "ignore_index": -100,
   "image_grid_pinpoints": [
@@ -27,7 +27,8 @@
     ]
   ],
   "image_token_index": 32000,
-  "model_type": "llava_next",
+  "model_type": "llava",
+  "pad_token_id": 32001,
   "projector_hidden_act": "gelu",
   "quantization_config": {
     "bits": 4,
@@ -53,7 +54,7 @@
     "vocab_size": 32064
   },
   "torch_dtype": "float16",
-  "transformers_version": "4.39.0",
+  "transformers_version": "4.38.2",
   "use_image_newline_parameter": true,
   "vision_config": {
     "hidden_size": 1024,
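Taken together, the config changes repoint this checkpoint from the LLaVA-NeXT classes (LlavaNextForConditionalGeneration, model_type "llava_next") to the plain LLaVA ones, add pad_token_id 32001, and pin transformers_version to 4.38.2. Below is a minimal loading sketch under those assumptions; the repo id is a placeholder (the commit does not name the hosting repo), and the 4-bit quantization_config embedded in config.json is expected to be picked up by from_pretrained as long as the matching quantization backend is installed (the diff does not show which backend the "bits": 4 block belongs to).

```python
# Minimal loading sketch, assuming the files from this commit are served from a
# Hugging Face repo. The repo id below is a placeholder, not stated in the commit.
import torch
from transformers import AutoProcessor, LlavaForConditionalGeneration

repo_id = "your-namespace/your-llava-repo"  # placeholder

# config.json now declares "LlavaForConditionalGeneration" / "model_type": "llava",
# so the plain LLaVA class is used instead of the LLaVA-NeXT one.
model = LlavaForConditionalGeneration.from_pretrained(
    repo_id,
    torch_dtype=torch.float16,  # matches "torch_dtype": "float16" in config.json
    device_map="auto",
)

# The updated processor_class entries resolve AutoProcessor to LlavaProcessor.
processor = AutoProcessor.from_pretrained(repo_id)
```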
generation_config.json CHANGED
@@ -1,6 +1,8 @@
 {
   "_from_model_config": true,
   "bos_token_id": 1,
+  "do_sample": true,
   "eos_token_id": 2,
-  "transformers_version": "4.39.0"
+  "pad_token_id": 32001,
+  "transformers_version": "4.38.2"
 }
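With "do_sample": true and "pad_token_id": 32001 now in generation_config.json, a plain generate() call samples by default and pads with token id 32001 rather than warning about a missing pad token. A short usage sketch; the prompt format and image URL are illustrative and not taken from the commit:

```python
# Generation sketch; repo_id is the same placeholder as in the loading example.
import requests
import torch
from PIL import Image
from transformers import AutoProcessor, LlavaForConditionalGeneration

repo_id = "your-namespace/your-llava-repo"  # placeholder
model = LlavaForConditionalGeneration.from_pretrained(
    repo_id, torch_dtype=torch.float16, device_map="auto"
)
processor = AutoProcessor.from_pretrained(repo_id)

image_url = "http://images.cocodataset.org/val2017/000000039769.jpg"  # arbitrary test image
image = Image.open(requests.get(image_url, stream=True).raw)
prompt = "USER: <image>\nDescribe this picture. ASSISTANT:"  # assumed LLaVA-style prompt

inputs = processor(text=prompt, images=image, return_tensors="pt").to(model.device)

# generate() now samples (do_sample=True) and uses pad_token_id=32001 by default,
# both coming from the committed generation_config.json.
output = model.generate(**inputs, max_new_tokens=128)
print(processor.decode(output[0], skip_special_tokens=True))
```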
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:151efce83ef957e249a48c1ef409e017e311f3b7f3149eeafbe544ff3955ca51
-size 4800976520
+oid sha256:4945730f4f36ae32ed76942313226a72a8f88029e9a50bbed0bf728431654e41
+size 4800968240
preprocessor_config.json CHANGED
@@ -1,5 +1,4 @@
 {
-  "aspect_ratio_setting": "anyres",
   "crop_size": {
     "height": 336,
     "width": 336
@@ -9,6 +8,7 @@
   "do_normalize": true,
   "do_rescale": true,
   "do_resize": true,
+  "image_aspect_ratio": "anyres",
   "image_grid_pinpoints": [
     [
       336,
@@ -36,13 +36,13 @@
     0.4578275,
     0.40821073
   ],
-  "image_processor_type": "LlavaNextImageProcessor",
+  "image_processor_type": "CLIPImageProcessor",
   "image_std": [
     0.26862954,
     0.26130258,
     0.27577711
   ],
-  "processor_class": "LlavaNextProcessor",
+  "processor_class": "LlavaProcessor",
   "resample": 3,
   "rescale_factor": 0.00392156862745098,
   "size": {
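The preprocessor change swaps LlavaNextImageProcessor / LlavaNextProcessor for CLIPImageProcessor / LlavaProcessor and renames the aspect-ratio key to image_aspect_ratio. A quick check of what the edited file should resolve to (placeholder repo id again); note that a plain CLIPImageProcessor does not implement the anyres tiling itself, so image_grid_pinpoints and image_aspect_ratio only matter to code that reads them explicitly:

```python
# Sketch: verify which classes the edited preprocessor_config.json resolves to.
from transformers import AutoProcessor

repo_id = "your-namespace/your-llava-repo"  # placeholder
processor = AutoProcessor.from_pretrained(repo_id)

print(type(processor).__name__)                  # expected: LlavaProcessor
print(type(processor.image_processor).__name__)  # expected: CLIPImageProcessor
print(processor.image_processor.crop_size)       # expected: {'height': 336, 'width': 336}
```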
tokenizer_config.json CHANGED
@@ -49,13 +49,9 @@
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
   "legacy": true,
-  "max_length": null,
   "model_max_length": 1000000000000000019884624838656,
-  "pad_to_multiple_of": null,
   "pad_token": "<pad>",
-  "pad_token_type_id": 0,
-  "padding_side": "left",
-  "processor_class": "LlavaNextProcessor",
+  "processor_class": "LlavaProcessor",
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",