RikkiXu committed
Commit d5d55a3
1 Parent(s): 5fcf273

Training in progress, step 100

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "princeton-nlp/Mistral-7B-Base-SFT-SimPO",
+  "_name_or_path": "/mnt/bn/xuruijie-llm/checkpoints/chatml/round1-5-beta1",
   "architectures": [
     "MistralForCausalLM"
   ],
@@ -20,7 +20,7 @@
   "sliding_window": 4096,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.41.1",
+  "transformers_version": "4.39.3",
   "use_cache": false,
   "vocab_size": 32000
 }
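For context, the config fields touched above can be read back with transformers' AutoConfig. A minimal sketch, assuming the public repo id from the removed "_name_or_path" line is reachable; the new value is a local checkpoint path and only resolves on the training machine:

# Minimal sketch: inspect the config fields changed/kept by this commit.
# Assumes the old "_name_or_path" repo id is downloadable from the Hub.
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("princeton-nlp/Mistral-7B-Base-SFT-SimPO")

print(cfg._name_or_path)                                # id/path the config was loaded from
print(getattr(cfg, "transformers_version", "unknown"))  # version that saved the config
print(cfg.sliding_window, cfg.torch_dtype, cfg.use_cache, cfg.vocab_size)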
runs/Jun19_21-36-55_n136-100-194/events.out.tfevents.1718804244.n136-100-194.4102142.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e574164c61f65a2104293ed3f2c5fc5feaa08cc35f82ed560eb6dc4272e807fc
+size 5600
runs/Jun19_21-42-20_n136-100-194/events.out.tfevents.1718804567.n136-100-194.4105091.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:01b93c0d28830d06bf9f2f72615aa690eb3c5f1d30df3dea995484a7f2e9aedb
+size 12360
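The two event files above (and training_args.bin below) are stored as Git LFS pointers rather than raw bytes: three "key value" lines giving the spec version, the sha256 oid, and the object size. A minimal sketch of parsing one, using the first pointer as sample input; the helper name is illustrative:

# Minimal sketch: parse a Git LFS pointer file into a dict (helper name is illustrative).
def parse_lfs_pointer(text: str) -> dict:
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    fields["size"] = int(fields["size"])  # byte size of the object the pointer stands in for
    return fields

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:e574164c61f65a2104293ed3f2c5fc5feaa08cc35f82ed560eb6dc4272e807fc
size 5600"""
print(parse_lfs_pointer(pointer))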
tokenizer.json CHANGED
@@ -134,7 +134,6 @@
     "end_of_word_suffix": null,
     "fuse_unk": true,
     "byte_fallback": true,
-    "ignore_merges": false,
     "vocab": {
       "<unk>": 0,
       "<s>": 1,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1d43daf6967bdbc93130e5fa6570c72d0feeace742e348e0392bb90c6689eda8
-size 6520
+oid sha256:3b98319cc7f8f873aaf0e58666d771b84128c64476049571b0dcb80f9fec032c
+size 6328
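training_args.bin itself is a TrainingArguments object pickled by the Hugging Face Trainer via torch.save. A minimal sketch of reading it back once the LFS object has been fetched; this assumes transformers is installed and the file is trusted, since unpickling executes code:

# Minimal sketch: load the pickled TrainingArguments (trusted file only).
import torch

args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)   # TrainingArguments
print(args.output_dir, args.per_device_train_batch_size)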