nnheui committed
Commit 35a6ffb
1 Parent(s): 91aef91

Training in progress, step 100

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/home/ubuntu/hieu.nn/LangProject/alignment-handbook/outputs/models/pythia-1.4b-sft-full/final-model",
+  "_name_or_path": "nnheui/pythia-1.4b-sft-full",
   "architectures": [
     "GPTNeoXForCausalLM"
   ],
@@ -23,7 +23,7 @@
   "rotary_pct": 0.25,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.38.2",
+  "transformers_version": "4.40.0",
   "use_cache": false,
   "use_parallel_residual": true,
   "vocab_size": 50304
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7985dff3ea63585b23c86c702954f01e9d87d382f21dd6aabd582482c423b6c8
+oid sha256:635f619ab31b0b3348b39a70a64e85d9f0a51fc44cabb2a9a855bcfaf593745d
 size 2829330208
runs/Jul08_01-33-05_42dbe5cf9ed4/events.out.tfevents.1720402951.42dbe5cf9ed4.841582.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a57b5ce5b1aca85d8570a2bd960cfc0ae305300df9cb70050a1670032b5c507c
+size 13908
tokenizer.json CHANGED
@@ -239,10 +239,30 @@
     "use_regex": true
   },
   "post_processor": {
-    "type": "ByteLevel",
-    "add_prefix_space": false,
-    "trim_offsets": true,
-    "use_regex": true
+    "type": "TemplateProcessing",
+    "single": [
+      {
+        "Sequence": {
+          "id": "A",
+          "type_id": 0
+        }
+      }
+    ],
+    "pair": [
+      {
+        "Sequence": {
+          "id": "A",
+          "type_id": 0
+        }
+      },
+      {
+        "Sequence": {
+          "id": "B",
+          "type_id": 1
+        }
+      }
+    ],
+    "special_tokens": {}
   },
   "decoder": {
     "type": "ByteLevel",
@@ -258,6 +278,7 @@
     "end_of_word_suffix": null,
     "fuse_unk": false,
     "byte_fallback": false,
+    "ignore_merges": false,
     "vocab": {
       "<|endoftext|>": 0,
       "<|padding|>": 1,
tokenizer_config.json CHANGED
@@ -1,4 +1,6 @@
 {
+  "add_bos_token": false,
+  "add_eos_token": false,
   "add_prefix_space": false,
   "added_tokens_decoder": {
     "0": {
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:40d40e1ead2e5443b55473419d9b5be7870cb8fe5679d6a416b4a119812c2701
-size 6200
+oid sha256:794aae45412f58e207f7e57b16d0915e74c54e21f8feb1bcf71aef160294272c
+size 6264