RikkiXu committed on
Commit
cc87d6b
1 Parent(s): 851a87e

Training in progress, step 100

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "alignment-handbook/zephyr-7b-sft-full",
+  "_name_or_path": "/mnt/bn/xuruijie-llm/checkpoints/chatml/round1-5-beta1",
   "architectures": [
     "MistralForCausalLM"
   ],
@@ -20,7 +20,7 @@
   "sliding_window": 4096,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.39.3",
+  "transformers_version": "4.41.1",
   "use_cache": false,
   "vocab_size": 32000
 }
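For reference, the updated config can be inspected locally with `transformers`. A minimal sketch, assuming a local clone of this repo (the `./checkpoint` path is illustrative, not the private path from the diff):

```python
# Minimal sketch: inspect the updated config with transformers.
# "./checkpoint" is an illustrative path to a local clone of this repo.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("./checkpoint")
print(config.architectures)         # ["MistralForCausalLM"]
print(config.torch_dtype)           # torch.bfloat16
print(config.use_cache)             # False (KV cache disabled during training)
print(config.transformers_version)  # "4.41.1" after this commit
```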
runs/Jun21_17-08-00_n136-082-130/events.out.tfevents.1718961008.n136-082-130.2131127.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:27d87bcd2015adabd7590ea3adc23577bd5d9d78d9e9d5124943e409f5d4d115
+size 12581
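The new TensorBoard event file is stored as a Git LFS pointer: the repo tracks only the blob's SHA-256 and byte size, shown above. A minimal sketch for checking a fetched blob against its pointer (run inside a checkout where LFS has already downloaded the file):

```python
# Minimal sketch: verify a Git-LFS-tracked file against its pointer fields.
import hashlib
from pathlib import Path

blob = Path("runs/Jun21_17-08-00_n136-082-130/"
            "events.out.tfevents.1718961008.n136-082-130.2131127.0")
data = blob.read_bytes()
print(len(data))                         # should be 12581
print(hashlib.sha256(data).hexdigest())  # should match the oid above
```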
tokenizer.json CHANGED
@@ -134,6 +134,7 @@
   "end_of_word_suffix": null,
   "fuse_unk": true,
   "byte_fallback": true,
+  "ignore_merges": false,
   "vocab": {
     "<unk>": 0,
     "<s>": 1,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:bec7c8a62a21dfa7db36090d7c7e37914b21f7e60b2a75556ca82856b00326c8
-size 6264
+oid sha256:4a03ea8d0bb33d47ee73aaae32c5621065b110a7f4643bc0e9d555172437c45b
+size 6520
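`training_args.bin` is the torch-pickled `TrainingArguments` object that the Hugging Face `Trainer` saves alongside checkpoints; the changed oid and size simply reflect a new serialization of the run's arguments. A minimal sketch for inspecting it locally:

```python
# Minimal sketch: inspect the serialized TrainingArguments.
# On newer torch versions, weights_only=False is needed to unpickle
# arbitrary objects like this one; only load files you trust.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)  # e.g. TrainingArguments
print(args.output_dir)
```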