yueqingyou committed on
Commit
74278c9
1 Parent(s): 5a477c4

Add BioQwen 1.8B q4f16_1 model weights

Browse files
added_tokens.json ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ {
2
+ "<|endoftext|>": 151643,
3
+ "<|im_end|>": 151645,
4
+ "<|im_start|>": 151644,
5
+ "<|padoftext|>": 151646
6
+ }
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
mlc-chat-config.json ADDED
@@ -0,0 +1,79 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "version": "0.1.0",
3
+ "model_type": "qwen2",
4
+ "quantization": "q4f16_1",
5
+ "model_config": {
6
+ "hidden_act": "silu",
7
+ "hidden_size": 2048,
8
+ "intermediate_size": 5504,
9
+ "num_attention_heads": 16,
10
+ "num_hidden_layers": 24,
11
+ "num_key_value_heads": 16,
12
+ "rms_norm_eps": 1e-06,
13
+ "rope_theta": 1000000.0,
14
+ "vocab_size": 151936,
15
+ "tie_word_embeddings": false,
16
+ "context_window_size": 512,
17
+ "prefill_chunk_size": 512,
18
+ "tensor_parallel_shards": 1,
19
+ "head_dim": 128,
20
+ "dtype": "float32",
21
+ "max_batch_size": 80
22
+ },
23
+ "vocab_size": 151936,
24
+ "context_window_size": 512,
25
+ "sliding_window_size": -1,
26
+ "prefill_chunk_size": 512,
27
+ "attention_sink_size": -1,
28
+ "tensor_parallel_shards": 1,
29
+ "temperature": 1.0,
30
+ "presence_penalty": 0.0,
31
+ "frequency_penalty": 0.0,
32
+ "repetition_penalty": 1.0,
33
+ "top_p": 1.0,
34
+ "tokenizer_files": [
35
+ "tokenizer.json",
36
+ "vocab.json",
37
+ "merges.txt",
38
+ "added_tokens.json",
39
+ "tokenizer_config.json"
40
+ ],
41
+ "tokenizer_info": {
42
+ "token_postproc_method": "byte_level",
43
+ "prepend_space_in_encode": false,
44
+ "strip_space_in_decode": false
45
+ },
46
+ "conv_template": {
47
+ "name": "chatml",
48
+ "system_template": "<|im_start|>system\n{system_message}<|im_end|>\n",
49
+ "system_message": "You are BIO-QWEN, an advanced AI specializing in the field of biology.",
50
+ "system_prefix_token_ids": null,
51
+ "add_role_after_system_message": true,
52
+ "roles": {
53
+ "user": "<|im_start|>user",
54
+ "assistant": "<|im_start|>assistant"
55
+ },
56
+ "role_templates": {
57
+ "user": "{user_message}",
58
+ "assistant": "{assistant_message}",
59
+ "tool": "{tool_message}"
60
+ },
61
+ "messages": [],
62
+ "seps": [
63
+ "<|im_end|>\n"
64
+ ],
65
+ "role_content_sep": "\n",
66
+ "role_empty_sep": "\n",
67
+ "stop_str": [
68
+ "<|im_end|>"
69
+ ],
70
+ "stop_token_ids": [
71
+ 151645
72
+ ],
73
+ "function_string": "",
74
+ "use_function_calling": false
75
+ },
76
+ "pad_token_id": 151646,
77
+ "bos_token_id": 0,
78
+ "eos_token_id": 151643
79
+ }
ndarray-cache.json ADDED
The diff for this file is too large to render. See raw diff
 
params_shard_0.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:64f9e7d0028281341273241591c30f67e566f7ebc5915df582f87f94ca9ef33f
3
+ size 155582464
params_shard_1.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2e3fa1d553159b1659f826a7a12bd7b4720f9e42a003c08282f415cacc1575a5
3
+ size 155582464
params_shard_10.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1114e443c1f8dbb27a107c831efe6bc5011e416f09f965cfb9b94f3673650b12
3
+ size 28479488
params_shard_11.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2db6da0ad6d929d1e8d1f5a975e1bc0002997be8d9ee008391934d75075cfbca
3
+ size 28479488
params_shard_12.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b202eecab5835ee51ca00429349687c213feeba143214a4a49fb6308766123b7
3
+ size 28479488
params_shard_13.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fc1b582f88d9994784bab9f7981b3ab4f1b99df9232255585c58addfd81f8b27
3
+ size 28479488
params_shard_14.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f54ef29bdc054ec3474f5a3bb23e8886e555a572c53dcfa1c937473b7996d731
3
+ size 28479488
params_shard_15.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8049cb82b893bfee52ff30827a7ecac33c4ec6651ef834cb331baf312138c4ab
3
+ size 28479488
params_shard_16.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:675bfde34f5cfa2e68d0eaf6fa02cfcd4cc4fc9274d42c8b80b582fbb616a4f9
3
+ size 28479488
params_shard_17.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4ee231092efadc5ef7622a5b034df73bfd32467b13abb9bb73160227cc3852fb
3
+ size 28479488
params_shard_18.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3e408b9e5fdc05a76ececd9214f1b462d75b4a666d579ed143fc762665130d7c
3
+ size 28479488
params_shard_19.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c6d163af146ef6ac2a455129058818e87f1b373454ee692d06b5a54838f71ea4
3
+ size 28479488
params_shard_2.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:242534ec48c8ef25f543c5595fa1cd0f66e9c4e31c523e897c403aed881251b8
3
+ size 19447808
params_shard_20.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0e7d3151e49f3d2f0052526928b2bda0c86728c7217c7b2204d05fb309237114
3
+ size 28479488
params_shard_21.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:39807e3c83da950374707eb5bc79e31c4a9220d8c667bd9c44d6eca5a234a07c
3
+ size 28479488
params_shard_22.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bd317c98ea3c61224f5d214415952643e3110fb54ad05589a9df11bf828f91e5
3
+ size 28479488
params_shard_23.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:76ecb62ff202a7d836bda3ce012130f488b0a2b9b37610663f673967e385e134
3
+ size 28479488
params_shard_24.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:037ce69a325ac70e99f409b7f085fe4d31bc3f08155b7fbbeb68c14db3fbba1b
3
+ size 28479488
params_shard_25.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:38765a2d784bddd2fadd12cf694c0f1472b24c21ae2568e1685b7cd8f540ff4a
3
+ size 28479488
params_shard_26.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:204c715fc677db88581788cf672173cd9dd8152162dc034db942a440d2ac4970
3
+ size 28479488
params_shard_27.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:928695ec7625da753fc23bd4081d29326cea9fa4bebe9d31b396945e7c4214e5
3
+ size 22138880
params_shard_3.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bf89c1653a9a244eb4fafd9a4ee89a8190f47e4fd8d929db44a5659137433755
3
+ size 25792512
params_shard_4.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cc2353d9de0dae97e5237eb4b0a9cddf27905ebb83bda88a0593c665ab55b58c
3
+ size 28479488
params_shard_5.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7da04422dc20a512c8746835438f72d7512a6aeebf86dabca7e730fa6157a3a7
3
+ size 28479488
params_shard_6.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1d0e9b0bae661265814caabd7d8b429257beaad0d3d7066c4be7c034a4169c3c
3
+ size 28479488
params_shard_7.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fa02ca0c83eb2ee2b7f435a3f2b1801b9b1695fb7f97495edf84f62abefc6ed4
3
+ size 28479488
params_shard_8.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b13015b65daf989d628d6f43f59a2482b7cfbc8ee0f70077a3c395c4148552ce
3
+ size 28479488
params_shard_9.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:31504f9beef6d02faf44c90d33f46f8e80f901c4e86e55fea0eea3a34b18654b
3
+ size 28479488
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "add_prefix_space": false,
3
+ "added_tokens_decoder": {
4
+ "151643": {
5
+ "content": "<|endoftext|>",
6
+ "lstrip": false,
7
+ "normalized": false,
8
+ "rstrip": false,
9
+ "single_word": false,
10
+ "special": true
11
+ },
12
+ "151644": {
13
+ "content": "<|im_start|>",
14
+ "lstrip": false,
15
+ "normalized": false,
16
+ "rstrip": false,
17
+ "single_word": false,
18
+ "special": true
19
+ },
20
+ "151645": {
21
+ "content": "<|im_end|>",
22
+ "lstrip": false,
23
+ "normalized": false,
24
+ "rstrip": false,
25
+ "single_word": false,
26
+ "special": true
27
+ },
28
+ "151646": {
29
+ "content": "<|padoftext|>",
30
+ "lstrip": false,
31
+ "normalized": false,
32
+ "rstrip": false,
33
+ "single_word": false,
34
+ "special": true
35
+ }
36
+ },
37
+ "additional_special_tokens": [
38
+ "<|im_start|>",
39
+ "<|im_end|>"
40
+ ],
41
+ "bos_token": null,
42
+ "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful assistant<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
43
+ "clean_up_tokenization_spaces": false,
44
+ "eos_token": "<|endoftext|>",
45
+ "errors": "replace",
46
+ "model_max_length": 32768,
47
+ "pad_token": "<|padoftext|>",
48
+ "split_special_tokens": false,
49
+ "tokenizer_class": "Qwen2Tokenizer",
50
+ "unk_token": null
51
+ }
vocab.json ADDED
The diff for this file is too large to render. See raw diff