RikkiXu committed on
Commit 1cc66fa
1 Parent(s): 6b4c199

Training in progress, step 100

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/mnt/bn/xuruijie-llm/checkpoints/new_world/v1-distill-round2-0.5-lr58",
+  "_name_or_path": "/mnt/bn/xuruijie-llm/checkpoints/new_world/v1-ultral",
   "architectures": [
     "MistralForCausalLM"
   ],
@@ -20,7 +20,7 @@
   "sliding_window": 4096,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.41.1",
+  "transformers_version": "4.38.2",
   "use_cache": false,
   "vocab_size": 32002
 }
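
For context, a minimal sketch of how the fields touched by this diff surface when the checkpoint is loaded with transformers. The local directory name ./checkpoint-100 is hypothetical (any clone of the repo at this commit would do), and the printed values assume the post-commit config.json:

```python
from transformers import AutoConfig

# Load the config from a local snapshot of this commit; "./checkpoint-100"
# is a hypothetical path standing in for wherever the files were pulled.
config = AutoConfig.from_pretrained("./checkpoint-100")

print(config.architectures)         # ['MistralForCausalLM']
print(config.transformers_version)  # '4.38.2' after this commit
print(config.use_cache)             # False: KV cache disabled during training
print(config.vocab_size)            # 32002 (two tokens beyond base Mistral's 32000)
```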
runs/Jun06_13-29-12_n136-129-074/events.out.tfevents.1717652376.n136-129-074.1035360.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d3038a15ec7983b5aeeaf93e3c5f4de23e71198323ab2629cc878806d290e224
+size 12302
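
The three added lines are a Git LFS pointer, not the event file itself; the real 12302-byte TensorBoard log is fetched on `git lfs pull`. A minimal sketch for reading its scalars once the actual file is present (the tag names in the comment are illustrative, not taken from this run):

```python
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Point the accumulator at the run directory added by this commit; it
# discovers the events.out.tfevents.* file inside and indexes its contents.
acc = EventAccumulator("runs/Jun06_13-29-12_n136-129-074")
acc.Reload()

print(acc.Tags()["scalars"])  # scalar tags logged so far, e.g. train/loss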
tokenizer.json CHANGED
@@ -152,7 +152,6 @@
   "end_of_word_suffix": null,
   "fuse_unk": true,
   "byte_fallback": true,
-  "ignore_merges": false,
   "vocab": {
     "<unk>": 0,
     "<s>": 1,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:4ab0d2ab5fad688425813e23c6e2b9f01a1ff29a0fb544a932591b142890b321
-size 6520
+oid sha256:0fca5a261a54300d1452b13f8cd6d979be181e6f4026bf678536b29a267ee051
+size 6264
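
Like the event file, this is an LFS pointer; the underlying training_args.bin is the TrainingArguments object that transformers.Trainer pickles alongside checkpoints, which is why its hash and size shift between runs. A minimal sketch for inspecting it, assuming the real file has been pulled and you trust its source (it is a full pickle, so only load files you trust, ideally under a transformers version close to the 4.38.2 that wrote it):

```python
import torch

# weights_only must stay False because this is an arbitrary pickled object,
# not a tensor state dict; that is also why the file should be trusted.
args = torch.load("training_args.bin", weights_only=False)

print(type(args).__name__)  # expected: 'TrainingArguments'
print(args.learning_rate)   # hyperparameters used for this run
```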