kevinscaria committed on
Commit de7c1d4
1 Parent(s): 6ff6c23

Upload 12 files

config.json CHANGED
@@ -25,7 +25,7 @@
   "relative_attention_num_buckets": 32,
   "tie_word_embeddings": false,
   "torch_dtype": "float32",
-  "transformers_version": "4.25.1",
+  "transformers_version": "4.28.1",
   "use_cache": true,
   "vocab_size": 32100
 }
generation_config.json ADDED
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "decoder_start_token_id": 0,
+  "eos_token_id": 1,
+  "pad_token_id": 0,
+  "transformers_version": "4.28.1"
+}
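Note: newer transformers releases read generation defaults from this standalone generation_config.json rather than from config.json. A minimal sketch of loading it (placeholder repo id):

    # Minimal sketch: load the new generation_config.json and inspect its defaults.
    # "your-username/your-model" is a placeholder repo id.
    from transformers import GenerationConfig

    gen_config = GenerationConfig.from_pretrained("your-username/your-model")
    print(gen_config.decoder_start_token_id)  # 0
    print(gen_config.eos_token_id)            # 1
    print(gen_config.pad_token_id)            # 0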
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:456d3711287986b7f27929fe0fbded05bcbbd98387a92740b9b1ef56ad894477
+size 1980446085
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6682ee07d4c121ae9481fb86383883addad2e26debb18702f3816fa7b2c6605a
+oid sha256:d675b65f1f4cba3406b8bd9e9ff2625f9581e68665bc3e274b79770e475cac62
 size 990236853
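Note: the weight file is stored via Git LFS, so this diff only updates the pointer's sha256 oid; the file size is unchanged. A minimal sketch for verifying a downloaded copy against the new oid (the local path is an assumption):

    # Minimal sketch: verify a downloaded pytorch_model.bin against the LFS pointer oid.
    # The local path below is an assumption; adjust it to wherever the file was saved.
    import hashlib

    expected_oid = "d675b65f1f4cba3406b8bd9e9ff2625f9581e68665bc3e274b79770e475cac62"

    sha256 = hashlib.sha256()
    with open("pytorch_model.bin", "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
            sha256.update(chunk)

    print(sha256.hexdigest() == expected_oid)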
rng_state.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:df27e633b2b4506b18a5a41802d52e86fe621a88e79819863260ed69ac7f9335
+size 14575
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:867316fc27f84daa9fb93f7705f521aa73cb5355d4500dd63c428294ee5115c9
+size 627
tokenizer_config.json CHANGED
@@ -101,13 +101,12 @@
   "<extra_id_98>",
   "<extra_id_99>"
   ],
+  "clean_up_tokenization_spaces": true,
   "eos_token": "</s>",
   "extra_ids": 100,
   "model_max_length": 512,
-  "name_or_path": "allenai/tk-instruct-base-def-pos",
   "pad_token": "<pad>",
   "sp_model_kwargs": {},
-  "special_tokens_map_file": "/home/patrick/.cache/huggingface/transformers/76bf19bfedb85afbe644966ca9ab7b0404d753a41bf601115bced39f825ffa9c.c94798918c92ded6aeef2d2f0e666d2cc4145eca1aa6e1336fde07f2e13e2f46",
   "tokenizer_class": "T5Tokenizer",
   "unk_token": "<unk>"
 }
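Note: this hunk adds "clean_up_tokenization_spaces": true and drops the machine-specific "name_or_path" and "special_tokens_map_file" entries. A minimal sketch showing the loaded tokenizer picking up the new flag (placeholder repo id):

    # Minimal sketch: confirm the tokenizer exposes the new clean_up_tokenization_spaces flag.
    # "your-username/your-model" is a placeholder repo id.
    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("your-username/your-model")
    print(tokenizer.clean_up_tokenization_spaces)  # True
    print(tokenizer.model_max_length)              # 512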
trainer_state.json ADDED
@@ -0,0 +1,38 @@
+{
+  "best_metric": null,
+  "best_model_checkpoint": null,
+  "epoch": 2.0,
+  "global_step": 580,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 1.0,
+      "eval_loss": 0.3067856431007385,
+      "eval_runtime": 17.8304,
+      "eval_samples_per_second": 35.782,
+      "eval_steps_per_second": 2.243,
+      "step": 290
+    },
+    {
+      "epoch": 1.72,
+      "learning_rate": 3.508587904974522e-05,
+      "loss": 0.3404,
+      "step": 500
+    },
+    {
+      "epoch": 2.0,
+      "eval_loss": 0.27310052514076233,
+      "eval_runtime": 17.9797,
+      "eval_samples_per_second": 35.485,
+      "eval_steps_per_second": 2.225,
+      "step": 580
+    }
+  ],
+  "max_steps": 1160,
+  "num_train_epochs": 4,
+  "total_flos": 2050812231828480.0,
+  "trial_name": null,
+  "trial_params": null
+}
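Note: trainer_state.json captures the Trainer checkpoint at step 580 of 1160 (epoch 2 of 4), with eval_loss dropping from ~0.307 at epoch 1 to ~0.273 at epoch 2. A minimal sketch for reading the logged eval losses (assumes the file is in the current directory):

    # Minimal sketch: read trainer_state.json and print the logged eval losses per epoch.
    # Assumes the file has been downloaded to the current directory.
    import json

    with open("trainer_state.json") as f:
        state = json.load(f)

    for entry in state["log_history"]:
        if "eval_loss" in entry:
            print(f"epoch {entry['epoch']}: eval_loss = {entry['eval_loss']}")
    # epoch 1.0: eval_loss = 0.3067856431007385
    # epoch 2.0: eval_loss = 0.27310052514076233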
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ff225437f574cbe9b9eaf0d86afb50eb816cb3f7268686ea9a5a08ab58af84cf
-size 3707
+oid sha256:500d4126a7ddd04082bf4f576e30f8a87b3d0d59fcf6c4a5c2a2fc51a5e0ac8a
+size 3835