Upload folder using huggingface_hub
- .gitattributes +20 -0
- README.md +116 -0
- config.json +28 -0
- imat-bf16-gmerged.dat +3 -0
- special_tokens_map.json +30 -0
- tokenizer_config.json +53 -0
- yi-1.5-6b-chat-bf16.gguf +3 -0
- yi-1.5-6b-chat-imat-IQ1_S.gguf +3 -0
- yi-1.5-6b-chat-imat-IQ2_M.gguf +3 -0
- yi-1.5-6b-chat-imat-IQ2_S.gguf +3 -0
- yi-1.5-6b-chat-imat-IQ2_XS.gguf +3 -0
- yi-1.5-6b-chat-imat-IQ2_XXS.gguf +3 -0
- yi-1.5-6b-chat-imat-IQ3_M.gguf +3 -0
- yi-1.5-6b-chat-imat-IQ3_S.gguf +3 -0
- yi-1.5-6b-chat-imat-IQ3_XS.gguf +3 -0
- yi-1.5-6b-chat-imat-IQ3_XXS.gguf +3 -0
- yi-1.5-6b-chat-imat-IQ4_NL.gguf +3 -0
- yi-1.5-6b-chat-imat-IQ4_XS.gguf +3 -0
- yi-1.5-6b-chat-imat-Q4_0.gguf +3 -0
- yi-1.5-6b-chat-imat-Q4_K_M.gguf +3 -0
- yi-1.5-6b-chat-imat-Q4_K_S.gguf +3 -0
- yi-1.5-6b-chat-imat-Q5_K_M.gguf +3 -0
- yi-1.5-6b-chat-imat-Q5_K_S.gguf +3 -0
- yi-1.5-6b-chat-imat-Q6_K.gguf +3 -0
- yi-1.5-6b-chat-imat-Q8_0.gguf +3 -0
.gitattributes
CHANGED
@@ -33,3 +33,23 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+imat-bf16-gmerged.dat filter=lfs diff=lfs merge=lfs -text
+yi-1.5-6b-chat-bf16.gguf filter=lfs diff=lfs merge=lfs -text
+yi-1.5-6b-chat-imat-IQ1_S.gguf filter=lfs diff=lfs merge=lfs -text
+yi-1.5-6b-chat-imat-IQ2_M.gguf filter=lfs diff=lfs merge=lfs -text
+yi-1.5-6b-chat-imat-IQ2_S.gguf filter=lfs diff=lfs merge=lfs -text
+yi-1.5-6b-chat-imat-IQ2_XS.gguf filter=lfs diff=lfs merge=lfs -text
+yi-1.5-6b-chat-imat-IQ2_XXS.gguf filter=lfs diff=lfs merge=lfs -text
+yi-1.5-6b-chat-imat-IQ3_M.gguf filter=lfs diff=lfs merge=lfs -text
+yi-1.5-6b-chat-imat-IQ3_S.gguf filter=lfs diff=lfs merge=lfs -text
+yi-1.5-6b-chat-imat-IQ3_XS.gguf filter=lfs diff=lfs merge=lfs -text
+yi-1.5-6b-chat-imat-IQ3_XXS.gguf filter=lfs diff=lfs merge=lfs -text
+yi-1.5-6b-chat-imat-IQ4_NL.gguf filter=lfs diff=lfs merge=lfs -text
+yi-1.5-6b-chat-imat-IQ4_XS.gguf filter=lfs diff=lfs merge=lfs -text
+yi-1.5-6b-chat-imat-Q4_0.gguf filter=lfs diff=lfs merge=lfs -text
+yi-1.5-6b-chat-imat-Q4_K_M.gguf filter=lfs diff=lfs merge=lfs -text
+yi-1.5-6b-chat-imat-Q4_K_S.gguf filter=lfs diff=lfs merge=lfs -text
+yi-1.5-6b-chat-imat-Q5_K_M.gguf filter=lfs diff=lfs merge=lfs -text
+yi-1.5-6b-chat-imat-Q5_K_S.gguf filter=lfs diff=lfs merge=lfs -text
+yi-1.5-6b-chat-imat-Q6_K.gguf filter=lfs diff=lfs merge=lfs -text
+yi-1.5-6b-chat-imat-Q8_0.gguf filter=lfs diff=lfs merge=lfs -text
README.md
ADDED
@@ -0,0 +1,116 @@
---
license: apache-2.0
pipeline_tag: text-generation
base_model: 01-ai/Yi-1.5-6B-Chat
tags:
- yi
- 01-ai
- instruct
- finetune
- chatml
- gguf
- imatrix
- importance matrix
model-index:
- name: 01-ai/Yi-1.5-6B-Chat-iMat-GGUF
  results: []
---

# Quant Infos

- Quants generated with an importance matrix to reduce quantization loss
- GGUF & imatrix generated from bf16 for "optimal" accuracy (some say this is snake oil, but it can't hurt)
- Wide coverage of different GGUF quant types, from Q8\_0 down to IQ1\_S
- Quantized with [llama.cpp](https://github.com/ggerganov/llama.cpp) commit [dc685be46622a8fabfd57cfa804237c8f15679b8](https://github.com/ggerganov/llama.cpp/commit/dc685be46622a8fabfd57cfa804237c8f15679b8) (master as of 2024-05-12)
- Imatrix generated with [this](https://github.com/ggerganov/llama.cpp/discussions/5263#discussioncomment-8395384) multi-purpose dataset:

```
./imatrix -c 512 -m $model_name-f16.gguf -f $llama_cpp_path/groups_merged.txt -o $out_path/imat-f16-gmerged.dat
```
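The individual quants in this repo were then produced from the bf16 GGUF using that imatrix. The exact invocations aren't recorded here, but with the llama.cpp build referenced above a sketch along these lines should work (swap the quant type and output name per file; binary and flag names can differ between llama.cpp versions):

```
# Hypothetical invocation, not the recorded one: quantize the bf16 GGUF
# into one of the listed types, guided by the importance matrix.
./quantize --imatrix imat-bf16-gmerged.dat \
    yi-1.5-6b-chat-bf16.gguf yi-1.5-6b-chat-imat-IQ4_XS.gguf IQ4_XS
```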
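To try a quant locally, any GGUF-capable runtime works. As a minimal sketch with llama.cpp itself (flag spelling may differ in your build), the prompt below hand-rolls the ChatML-style template defined in tokenizer_config.json:

```
# Hypothetical example: run a quant with llama.cpp's main binary.
# -e turns the \n escapes in the prompt into real newlines.
./main -m yi-1.5-6b-chat-imat-Q4_K_M.gguf -c 4096 -n 256 -e \
    -p "<|im_start|>user\nExplain importance-matrix quantization briefly.<|im_end|>\n<|im_start|>assistant\n"
```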
# Original Model Card:

<div align="center">

<picture>
  <img src="https://raw.githubusercontent.com/01-ai/Yi/main/assets/img/Yi_logo_icon_light.svg" width="150px">
</picture>

</div>

<p align="center">
  <a href="https://github.com/01-ai">🐙 GitHub</a> •
  <a href="https://discord.gg/hYUwWddeAu">👾 Discord</a> •
  <a href="https://twitter.com/01ai_yi">🐤 Twitter</a> •
  <a href="https://github.com/01-ai/Yi-1.5/issues/2">💬 WeChat</a>
  <br/>
  <a href="https://arxiv.org/abs/2403.04652">📝 Paper</a> •
  <a href="https://github.com/01-ai/Yi/tree/main?tab=readme-ov-file#faq">🙌 FAQ</a> •
  <a href="https://github.com/01-ai/Yi/tree/main?tab=readme-ov-file#learning-hub">📗 Learning Hub</a>
</p>

# Intro

Yi-1.5 is an upgraded version of Yi. It is continuously pre-trained on Yi with a high-quality corpus of 500B tokens and fine-tuned on 3M diverse fine-tuning samples.

Compared with Yi, Yi-1.5 delivers stronger performance in coding, math, reasoning, and instruction-following capability, while still maintaining excellent capabilities in language understanding, commonsense reasoning, and reading comprehension.

<div align="center">

| Model  | Context Length | Pre-trained Tokens |
| :----: | :------------: | :----------------: |
| Yi-1.5 | 4K             | 3.6T               |

</div>

# Models

- Chat models

<div align="center">

| Name            | Download                                                                                                                                                              |
| --------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| Yi-1.5-34B-Chat | • [🤗 Hugging Face](https://huggingface.co/collections/01-ai/yi-15-2024-05-663f3ecab5f815a3eaca7ca8) • [🤖 ModelScope](https://www.modelscope.cn/organization/01ai) |
| Yi-1.5-9B-Chat  | • [🤗 Hugging Face](https://huggingface.co/collections/01-ai/yi-15-2024-05-663f3ecab5f815a3eaca7ca8) • [🤖 ModelScope](https://www.modelscope.cn/organization/01ai) |
| Yi-1.5-6B-Chat  | • [🤗 Hugging Face](https://huggingface.co/collections/01-ai/yi-15-2024-05-663f3ecab5f815a3eaca7ca8) • [🤖 ModelScope](https://www.modelscope.cn/organization/01ai) |

</div>

- Base models

<div align="center">

| Name       | Download                                                                                                                                                              |
| ---------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| Yi-1.5-34B | • [🤗 Hugging Face](https://huggingface.co/collections/01-ai/yi-15-2024-05-663f3ecab5f815a3eaca7ca8) • [🤖 ModelScope](https://www.modelscope.cn/organization/01ai) |
| Yi-1.5-9B  | • [🤗 Hugging Face](https://huggingface.co/collections/01-ai/yi-15-2024-05-663f3ecab5f815a3eaca7ca8) • [🤖 ModelScope](https://www.modelscope.cn/organization/01ai) |
| Yi-1.5-6B  | • [🤗 Hugging Face](https://huggingface.co/collections/01-ai/yi-15-2024-05-663f3ecab5f815a3eaca7ca8) • [🤖 ModelScope](https://www.modelscope.cn/organization/01ai) |

</div>

# Benchmarks

- Chat models

Yi-1.5-34B-Chat is on par with or excels beyond larger models in most benchmarks.

![image/png](https://cdn-uploads.huggingface.co/production/uploads/656d9adce8bf55919aca7c3f/KcsJ9Oc1VnEmfCDEJc5cd.png)

Yi-1.5-9B-Chat is the top performer among similarly sized open-source models.

![image/png](https://cdn-uploads.huggingface.co/production/uploads/656d9adce8bf55919aca7c3f/xf6pLg5jqRCwjlh6m3t6_.png)

- Base models

Yi-1.5-34B is on par with or excels beyond larger models in some benchmarks.

![image/png](https://cdn-uploads.huggingface.co/production/uploads/656d9adce8bf55919aca7c3f/BwU7QM-03dZvZzwdIE1xY.png)

Yi-1.5-9B is the top performer among similarly sized open-source models.

![image/png](https://cdn-uploads.huggingface.co/production/uploads/656d9adce8bf55919aca7c3f/y-EYSYPT-3aWLJ0x8R94F.png)

# Quick Start

For getting up and running with Yi-1.5 models quickly, see [README](https://github.com/01-ai/Yi-1.5).
config.json
ADDED
@@ -0,0 +1,28 @@
{
  "architectures": [
    "LlamaForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "initializer_range": 0.02,
  "intermediate_size": 11008,
  "max_position_embeddings": 4096,
  "model_type": "llama",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 4,
  "pad_token_id": 0,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-06,
  "rope_scaling": null,
  "rope_theta": 5000000.0,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.40.0",
  "use_cache": false,
  "vocab_size": 64000
}
imat-bf16-gmerged.dat
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a3ad9c8d573ea4bddc0212e45e64f9738f996f4b089fdd0bfe2f7153e655c481
size 4562201
special_tokens_map.json
ADDED
@@ -0,0 +1,30 @@
{
  "bos_token": {
    "content": "<|startoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|im_end|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  }
}
tokenizer_config.json
ADDED
@@ -0,0 +1,53 @@
{
  "add_bos_token": false,
  "add_eos_token": false,
  "add_prefix_space": true,
  "added_tokens_decoder": {
    "0": {
      "content": "<unk>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<|startoftext|>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "7": {
      "content": "<|im_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<|startoftext|>",
  "chat_template": "{% if messages[0]['role'] == 'system' %}{% set system_message = messages[0]['content'] %}{% endif %}{% if system_message is defined %}{{ system_message }}{% endif %}{% for message in messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ '<|im_start|>user\\n' + content + '<|im_end|>\\n<|im_start|>assistant\\n' }}{% elif message['role'] == 'assistant' %}{{ content + '<|im_end|>' + '\\n' }}{% endif %}{% endfor %}",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|im_end|>",
  "legacy": true,
  "model_max_length": 4096,
  "pad_token": "<unk>",
  "padding_side": "right",
  "sp_model_kwargs": {},
  "spaces_between_special_tokens": false,
  "split_special_tokens": false,
  "tokenizer_class": "LlamaTokenizer",
  "unk_token": "<unk>",
  "use_default_system_prompt": false
}
yi-1.5-6b-chat-bf16.gguf
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a79bd54ce0ab813fb54da80aa1ed5e1bf72405b3268c44553b099bba28b93dc4
size 12124098400
yi-1.5-6b-chat-imat-IQ1_S.gguf
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ece6443521972c39948ed1e2c4bbf9590387fc19d3337b84dd488431fe0aa555
size 1432293504
yi-1.5-6b-chat-imat-IQ2_M.gguf
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ab487dbbb99499027ce08ce124ddc722f95a2660fa975bd6da49a181f7853f8b
size 2163527808
yi-1.5-6b-chat-imat-IQ2_S.gguf
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4c81cba1f56896b77fe8980208b76d30c39891ddc5a3f41eee4ebaddb79a791a
size 2015023232
yi-1.5-6b-chat-imat-IQ2_XS.gguf
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7ed5956bcfd4987230d745ff14c7af7e5f29a41e5a96be621b390ddb86ce2d34
size 1894584448
yi-1.5-6b-chat-imat-IQ2_XXS.gguf
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4a0ae9ca0459383568db95d8abf97ef684409c4bba499d165af58191fe28d958
size 1729302656
yi-1.5-6b-chat-imat-IQ3_M.gguf
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2920a4e063d81b21186687a4145044e1e07222b00ba85ac2a87367257d1e9cec
size 2813366400
yi-1.5-6b-chat-imat-IQ3_S.gguf
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:eeedb16adf9ff1d83fa74edf16770be98a395115f9c3a69d423b50638ec650a3
size 2718109824
yi-1.5-6b-chat-imat-IQ3_XS.gguf
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:17998e899011a82472ae9b31e292d516f5edc7f8eed2d0515e71f75b8449e06f
size 2588348544
yi-1.5-6b-chat-imat-IQ3_XXS.gguf
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0dc900a4495712106cfb56caa94a504da0e2150cfba522c63f5d47384dbdbff4
size 2414334080
yi-1.5-6b-chat-imat-IQ4_NL.gguf
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4c7d6f46b1b3dc9c010c04474fced5d43e4861e7998d0b27946d4c7baf3fa98b
size 3487715456
yi-1.5-6b-chat-imat-IQ4_XS.gguf
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cc7add1995789254805b8c3519bb46dc37f390787dd6ecdd99f4d73b9c575460
size 3308605568
yi-1.5-6b-chat-imat-Q4_0.gguf
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7ebb26c49da7b96c2b2eee1f382b14877d7885b107e8bd31904f6b476eb14ce3
size 3490599040
yi-1.5-6b-chat-imat-Q4_K_M.gguf
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f0e4f53cd01cd35db4462e9acc49a35983e5d4467ca0caad39bfb21e2221e9de
size 3673968768
yi-1.5-6b-chat-imat-Q4_K_S.gguf
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7c7b6dd3005ae588b3b1a5825c8930d24e572f8eda048f63aef91d97ac842c6f
size 3502919808
yi-1.5-6b-chat-imat-Q5_K_M.gguf
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e0591ea7fbbfab5342c7bce4f7e139482aa395d21fd5071564e2a650cb80add3
size 4304425088
yi-1.5-6b-chat-imat-Q5_K_S.gguf
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6f99b9c308423479f62abdb426b7c594312537784b917bb0a45ab9ac92261368
size 4204155008
yi-1.5-6b-chat-imat-Q6_K.gguf
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9eb711ce9a456bfb3efa4adc04262f83edb5e769ca2fde943c74f96f8f51671d
size 4974284928
yi-1.5-6b-chat-imat-Q8_0.gguf
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:bd51ddda04f56b83e4807c88746552f3d1aad9770b2339483be42906a51b9eae
size 6442127488