IlyaGusev committed
Commit bc54c81
Parent(s): c588356

v4: ChatML -> Llama 3 template

model-00001-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:23d40b2624d779257ec389b441c65d7ba5365bb86c0b6d5d3341cbf5dbc2cb25
+oid sha256:d8c5b217a8ccef05a066f359ae73341bcff727022c428578e8ef7f85f9c56f7d
 size 4976698672
model-00002-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5d773178b71d3c0e544ad4a971e754a5a9eb6f884e882e80cdf9497287fc4ff8
+oid sha256:4115f9788f398b4ab09e463c9265add7f37ce8fd985fc809280dad614b71ae2e
 size 4999802720
model-00003-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:47c2fbf539155e5796e525249ed99744f8f1453970313e6078b6becf20aa085a
+oid sha256:b2e4d243f89d797559130d0a377bfa97ea278711074e5113044a8f8da8f2503e
 size 4915916176
model-00004-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:47d7b7ef31a88c44cc3fd39c43f91b4fe87a7a492f1e3ecee72dbefa4e479f49
+oid sha256:6c91b732fd6453ccc08c7de471705fabbfbb0b69801bff9b2d96979a44db89a1
 size 1168138808
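
The four pointer files above record new Git LFS object hashes for the re-exported weight shards; the byte sizes are unchanged. A minimal sketch of verifying a locally downloaded shard against its pointer, assuming the shard sits in the working directory (expected oid and size copied from the first pointer above):

import hashlib
import os

# Values copied from the new LFS pointer for model-00001-of-00004.safetensors.
EXPECTED_OID = "d8c5b217a8ccef05a066f359ae73341bcff727022c428578e8ef7f85f9c56f7d"
EXPECTED_SIZE = 4976698672

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    # Stream the file so a ~5 GB shard never has to fit in memory.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

path = "model-00001-of-00004.safetensors"  # assumed local download location
assert os.path.getsize(path) == EXPECTED_SIZE, "size does not match the pointer"
assert sha256_of(path) == EXPECTED_OID, "content does not match the pointer"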
special_tokens_map.json CHANGED
@@ -1,24 +1,20 @@
 {
-  "additional_special_tokens": [
-    "<|im_start|>",
-    "<|im_end|>"
-  ],
   "bos_token": {
-    "content": "<|im_start|>",
+    "content": "<|begin_of_text|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
     "single_word": false
   },
   "eos_token": {
-    "content": "<|im_end|>",
+    "content": "<|eot_id|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
     "single_word": false
   },
   "pad_token": {
-    "content": "<|im_start|>",
+    "content": "<|begin_of_text|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8e7cad82642921f006bd0a63b22c613ccbfcf663e81a42da76a476e69bafc8df
-size 9084486
+oid sha256:e134af98b985517b4f068e3755ae90d4e9cd2d45d328325dc503f1c6b2d06cc7
+size 9085698
tokenizer_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "added_tokens_decoder": {
     "128000": {
-      "content": "<|im_start|>",
+      "content": "<|begin_of_text|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -73,7 +73,7 @@
       "special": true
     },
     "128009": {
-      "content": "<|im_end|>",
+      "content": "<|eot_id|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -2049,20 +2049,16 @@
       "special": true
     }
   },
-  "additional_special_tokens": [
-    "<|im_start|>",
-    "<|im_end|>"
-  ],
-  "bos_token": "<|im_start|>",
-  "chat_template": "{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% set system_message = 'Ты — Сайга, русскоязычный автоматический ассистент. Ты разговариваешь с людьми и помогаешь им.' %}{% endif %}{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in loop_messages %}{% if loop.index0 == 0 %}{{'<|im_start|>system\n' + system_message + '<|im_end|>\n'}}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+  "bos_token": "<|begin_of_text|>",
+  "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{% if add_generation_prompt %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}{% endif %}",
   "clean_up_tokenization_spaces": true,
-  "eos_token": "<|im_end|>",
+  "eos_token": "<|eot_id|>",
   "model_input_names": [
     "input_ids",
     "attention_mask"
   ],
   "model_max_length": 8192,
-  "pad_token": "<|im_start|>",
+  "pad_token": "<|begin_of_text|>",
   "padding_side": "left",
   "tokenizer_class": "PreTrainedTokenizerFast"
 }
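
The chat_template is the substantive change: the old ChatML template wrapped each turn in <|im_start|>/<|im_end|> and injected a default Russian system prompt when none was given, while the new template emits the Llama 3 header tokens and adds nothing beyond the supplied messages, so a system prompt now has to be passed explicitly. A minimal rendering sketch using the new template (same placeholder path as above):

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("./saiga_v4")  # placeholder local path

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hi!"},
]
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# Expected shape with the new template:
# <|begin_of_text|><|start_header_id|>system<|end_header_id|>
#
# You are a helpful assistant.<|eot_id|><|start_header_id|>user<|end_header_id|>
#
# Hi!<|eot_id|><|start_header_id|>assistant<|end_header_id|>
#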