iyanello committed
Commit
527d491
1 Parent(s): e7b9f2e

Upload 29 files

params_shard_100.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4e522b9eb0e4e0575d450c699ead06fe332adfa916cfb2bcc1496cec91b7b740
+ size 29360128
params_shard_101.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d8ada6666c14f849defa4140831a486dbc877494d0842e83a15458d278a203f6
+ size 22036480
params_shard_102.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:26466ed1a1ee1fe7469fdb9e581b36d218890fcf373bb043cfe947004d6c604f
+ size 58720256
params_shard_103.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ccda2584e4f7b728215c6e7ffc47dfd2f24bd6b1dbf66853b1947035a6d04042
+ size 29360128
params_shard_104.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:14dd05b27fbb4ccaa29e6287e9e3c7708caf4e1af04d67b841b9dd1d1484a085
+ size 30932992
params_shard_105.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:adad236714c78c52d9cbb7b12e70eba8750a9259028cd3a63cd13a553cb5b1ee
+ size 65540096
params_shard_106.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:379bd47a4034299be6ed18c6c16883e0b257fdc9f39dda23a475224ed7bd9c06
+ size 12935680
params_shard_81.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c7c706d8f43cfaaeeec6335f86cf524b5d0dc595323e2c4924f54af30aa032d
+ size 29360128
params_shard_82.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fe37fc4189ba0e92c9cba650aac1f31f199568bd2937a201ef04492107be694c
+ size 25182208
params_shard_83.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:314e53ea53d80d054fc7b03d9f49cde999c62d45b225134cf60aa6191c1292ad
+ size 58720256
params_shard_84.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a79cce13f354dc7697dc2e58f3b4314145c61de38646450a1deefa157e241163
+ size 29360128
params_shard_85.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aef5db9d6fdb35c4a4e07ccf29cfb15eba4bedd5870a635eb573a6b4a06877d7
+ size 33046528
params_shard_86.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a517efc8981276d4c0a0dca33d11f1d4d48ec2f8de363385de5a9f76337941af
+ size 58720256
params_shard_87.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b43a9df78bb6f40e587f35e2ecbbc8806ad995cad33a3fea39b4a86f6d036da2
+ size 29360128
params_shard_88.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c1af6740fa8ebc029776bab1d87f802e7b737937b3b8b985ad46f63350a6b2a2
+ size 22036480
params_shard_89.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:53bc74e34c7073d78fcc7e14f582ad0c9109692e35b342698702b2f9737263ec
+ size 58720256
params_shard_90.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1c6d33ba4d8230d8b6da8e799b8dcdf9cc4d04ca024a9f99fa8e989ee34d2071
+ size 29360128
params_shard_91.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4a4187e4cc98cc1b3bb0717aeb8b94bb75fb47f49ed1450a7e800876fdfb238c
+ size 30932992
params_shard_92.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c897ac3858f080366fd05214c1723ebef9f4967297aae30b2e86da18499b0742
+ size 58720256
params_shard_93.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ae06f1bd6428d09e04ee71d56aa253c5651fa7740653d508eaa6693326d6c488
+ size 27279360
params_shard_94.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e98eb3fe865d427ebdc2a06ff3bc23ee71fd61800d56ff4639b0bc131347df65
+ size 29360128
params_shard_95.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7af5440008ba7ffd9b9b5033cde357b5eff100e7a44077683dfbd3c4c71bc53d
+ size 25182208
params_shard_96.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7f7c47090df2336785c5ab80b85836b29383c3f64c4c8ab7bc927cc99fcc436e
+ size 58720256
params_shard_97.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:76d6aa09096b7b6c1a1fd6eb8e6b0f5d8bd021ef687b33c1727c04037abe5f5d
+ size 29360128
params_shard_98.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:57acc7740dc24b35a8467f4c9cba7262c205ee1863e8d628af519e553c9f6f47
+ size 33046528
params_shard_99.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3ab10f975358b95bae1a835e865094f7db27157559f0f5ffdb7624fd17180717
+ size 58720256
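
Each of the shard entries above is a Git LFS pointer (spec v1), not the weight data itself: the pointer records only the SHA-256 of the real shard and its size in bytes. Below is a minimal Python sketch of checking a downloaded shard against such a pointer; the file paths are placeholders, and keeping a separate copy of the pointer text next to the shard is an assumption (in a normal LFS checkout the pointer is replaced in place by the resolved file).

import hashlib

def read_pointer(pointer_path):
    # Parse the three "key value" lines of a git-lfs spec v1 pointer.
    fields = {}
    with open(pointer_path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

def shard_matches_pointer(shard_path, pointer_path):
    fields = read_pointer(pointer_path)
    expected_digest = fields["oid"].split(":", 1)[1]  # "sha256:<hex>" -> "<hex>"
    expected_size = int(fields["size"])
    sha = hashlib.sha256()
    size = 0
    with open(shard_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            sha.update(chunk)
            size += len(chunk)
    return sha.hexdigest() == expected_digest and size == expected_size

# Hypothetical paths: the resolved shard and a saved copy of its pointer text.
print(shard_matches_pointer("params_shard_100.bin", "params_shard_100.bin.pointer"))
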
tokenizer.json ADDED
The diff for this file is too large to render.
 
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
+ size 493443
tokenizer_config.json ADDED
@@ -0,0 +1,64 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": false,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<unk>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "32000": {
+       "content": "<|end_of_turn|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "32001": {
+       "content": "<|pad_0|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "additional_special_tokens": [
+     "<|end_of_turn|>",
+     "<|pad_0|>"
+   ],
+   "bos_token": "<s>",
+   "chat_template": "{{ bos_token }}{% for message in messages %}{{ 'GPT4 Correct ' + message['role'].title() + ': ' + message['content'] + '<|end_of_turn|>'}}{% endfor %}{% if add_generation_prompt %}{{ 'GPT4 Correct Assistant:' }}{% endif %}",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "</s>",
+   "legacy": true,
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "</s>",
+   "sp_model_kwargs": {},
+   "spaces_between_special_tokens": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "trust_remote_code": false,
+   "unk_token": "<unk>",
+   "use_default_system_prompt": true,
+   "use_fast": true
+ }
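
The chat_template field above is the Jinja template that transformers applies when apply_chat_template is called on this tokenizer: it prepends the BOS token, prefixes each turn with "GPT4 Correct <Role>: ", and closes every turn with <|end_of_turn|>. A minimal usage sketch, assuming the files from this commit are available in a local directory (the path below is a placeholder):

from transformers import AutoTokenizer

# Placeholder path: a local directory containing tokenizer.model,
# tokenizer.json and the tokenizer_config.json shown above.
tok = AutoTokenizer.from_pretrained("./path/to/this/repo")

messages = [
    {"role": "user", "content": "Hello"},
    {"role": "assistant", "content": "Hi there"},
    {"role": "user", "content": "How are you today?"},
]
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# Per the template, this renders as:
# <s>GPT4 Correct User: Hello<|end_of_turn|>GPT4 Correct Assistant: Hi there<|end_of_turn|>GPT4 Correct User: How are you today?<|end_of_turn|>GPT4 Correct Assistant:
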