RikkiXu committed on
Commit
94815fd
1 Parent(s): 331a206

Training in progress, step 100

config.json CHANGED
@@ -1,16 +1,16 @@
 {
-  "_name_or_path": "/mnt/bn/xuruijie-llm/checkpoints/new_world/ultral-round1",
+  "_name_or_path": "princeton-nlp/Mistral-7B-Base-SFT-DPO",
   "architectures": [
     "MistralForCausalLM"
   ],
   "attention_dropout": 0.0,
   "bos_token_id": 1,
-  "eos_token_id": 32000,
+  "eos_token_id": 2,
   "hidden_act": "silu",
   "hidden_size": 4096,
   "initializer_range": 0.02,
   "intermediate_size": 14336,
-  "max_position_embeddings": 8192,
+  "max_position_embeddings": 32768,
   "model_type": "mistral",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
@@ -20,7 +20,7 @@
   "sliding_window": 4096,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.40.2",
+  "transformers_version": "4.39.3",
   "use_cache": false,
-  "vocab_size": 32002
+  "vocab_size": 32000
 }
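A minimal sketch of how the updated config values could be checked after pulling this commit (assumes the transformers library is available; the local path "." is an illustrative placeholder for this repo's checkout):

# Illustrative check of the values changed above; the repo path is an assumption.
from transformers import AutoConfig

config = AutoConfig.from_pretrained(".")   # reads the updated config.json
print(config.eos_token_id)                 # 2 (</s>) after this commit, was 32000
print(config.max_position_embeddings)      # 32768, was 8192
print(config.vocab_size)                   # 32000, was 32002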
runs/Jun12_17-10-52_n136-129-074/events.out.tfevents.1718185190.n136-129-074.3476682.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1629c81e51102edc6f478cfb2c5b551d47bbaeefb7dd02e9892db93ce28f622e
+size 12294
special_tokens_map.json CHANGED
@@ -1,8 +1,4 @@
 {
-  "additional_special_tokens": [
-    "<|end_of_turn|>",
-    "<|pad_0|>"
-  ],
   "bos_token": {
     "content": "<s>",
     "lstrip": false,
@@ -11,19 +7,13 @@
     "single_word": false
   },
   "eos_token": {
-    "content": "<|end_of_turn|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": {
-    "content": "<|end_of_turn|>",
+    "content": "</s>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
     "single_word": false
   },
+  "pad_token": "</s>",
   "unk_token": {
     "content": "<unk>",
     "lstrip": false,
tokenizer.json CHANGED
@@ -29,24 +29,6 @@
       "rstrip": false,
       "normalized": false,
       "special": true
-    },
-    {
-      "id": 32000,
-      "content": "<|end_of_turn|>",
-      "single_word": false,
-      "lstrip": false,
-      "rstrip": false,
-      "normalized": false,
-      "special": true
-    },
-    {
-      "id": 32001,
-      "content": "<|pad_0|>",
-      "single_word": false,
-      "lstrip": false,
-      "rstrip": false,
-      "normalized": false,
-      "special": true
     }
   ],
   "normalizer": {
@@ -152,7 +134,6 @@
     "end_of_word_suffix": null,
     "fuse_unk": true,
     "byte_fallback": true,
-    "ignore_merges": false,
     "vocab": {
       "<unk>": 0,
       "<s>": 1,
tokenizer_config.json CHANGED
@@ -25,38 +25,19 @@
       "rstrip": false,
       "single_word": false,
       "special": true
-    },
-    "32000": {
-      "content": "<|end_of_turn|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "32001": {
-      "content": "<|pad_0|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
     }
   },
-  "additional_special_tokens": [
-    "<|end_of_turn|>",
-    "<|pad_0|>"
-  ],
+  "additional_special_tokens": [],
   "bos_token": "<s>",
-  "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '\n\nHuman: '+ message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '\n\nAssistant: ' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '\n\nAssistant: ' }}\n{% endif %}\n{% endfor %}",
+  "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "<|end_of_turn|>",
+  "eos_token": "</s>",
   "legacy": true,
   "model_max_length": 2048,
-  "pad_token": "<|end_of_turn|>",
+  "pad_token": "</s>",
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",
   "unk_token": "<unk>",
-  "use_default_system_prompt": true
+  "use_default_system_prompt": false
 }
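For illustration, a minimal sketch of how the new chat template renders (assumes transformers is installed; the messages and the "." path are made up for the example):

# Illustrative use of apply_chat_template with the template added above.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)   # <|system|> and <|user|> turns, each ending in </s>, then a trailing <|assistant|>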
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3d3c4f74fae0ad31d129ff557779326b0ad540e20beb74d4160e4acd0f9088ae
-size 6328
+oid sha256:893103f270d52d59f97a930cbc3798f7a4c3f083afc65e6d9bb5a98254d03ed1
+size 6264