DAVID TORRES committed
Commit 03e161c · 1 Parent(s): fc96915

End of training

README.md CHANGED
@@ -32,15 +32,13 @@ More information needed
 ### Training hyperparameters
 
 The following hyperparameters were used during training:
-- learning_rate: 0.0005
+- learning_rate: 5e-05
 - train_batch_size: 8
 - eval_batch_size: 8
 - seed: 42
-- gradient_accumulation_steps: 8
-- total_train_batch_size: 64
 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
-- lr_scheduler_type: cosine
-- num_epochs: 300
+- lr_scheduler_type: linear
+- num_epochs: 3.0
 
 ### Training results
 
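For context, a minimal sketch of how the updated hyperparameters could be expressed with the Hugging Face `TrainingArguments` API; the output directory is a placeholder and nothing here is taken from the repository's actual training script:

```python
# Sketch only: mirrors the hyperparameters listed in the updated README.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="out",               # placeholder path, not from this repo
    learning_rate=5e-05,            # was 0.0005 before this commit
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    seed=42,
    lr_scheduler_type="linear",     # was "cosine"
    num_train_epochs=3.0,           # was 300
    # Adam with betas=(0.9, 0.999) and epsilon=1e-08 is the Trainer default.
)
# `training_args` would then be handed to a `Trainer` together with the model
# and datasets, which are not shown here.
```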
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:31aa75174f1171830bbbd66a21b8d5a782f80e1a78e5d0006d746d19af98d641
-size 1324917277
+oid sha256:7d80ed7d49f311a809b2250816134017b2f8fd2fb84edadede4687d22562508f
+size 1324911837
special_tokens_map.json CHANGED
@@ -1,6 +1,6 @@
 {
   "bos_token": "</s>",
   "eos_token": "</s>",
-  "pad_token": "</s>",
+  "pad_token": "<pad>",
   "unk_token": "</s>"
 }
tokenizer.json CHANGED
@@ -1,11 +1,6 @@
 {
   "version": "1.0",
-  "truncation": {
-    "direction": "Right",
-    "max_length": 512,
-    "strategy": "LongestFirst",
-    "stride": 0
-  },
+  "truncation": null,
   "padding": null,
   "added_tokens": [
     {
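With "truncation": null, the fast tokenizer no longer truncates inputs to 512 tokens by default. A hedged sketch of how truncation could still be requested per call, assuming the reader substitutes the actual repository id:

```python
from transformers import AutoTokenizer

# Placeholder id; replace with the actual model repository.
tokenizer = AutoTokenizer.from_pretrained("your-username/your-model")

# Truncation is now opt-in rather than baked into tokenizer.json:
encoded = tokenizer("a very long input ...", truncation=True, max_length=512)
```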
tokenizer_config.json CHANGED
@@ -25,7 +25,7 @@
   "eos_token": "</s>",
   "errors": "replace",
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "</s>",
+  "pad_token": "<pad>",
   "tokenizer_class": "GPT2Tokenizer",
   "unk_token": "</s>"
 }
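Both special_tokens_map.json and tokenizer_config.json now point pad_token at a dedicated <pad> token instead of reusing </s>. The sketch below shows one common way such a pad token is registered with a tokenizer and the model embeddings kept in sync; the repo id and model class are assumptions, not details taken from this commit:

```python
from transformers import AutoModel, AutoTokenizer

repo = "your-username/your-model"   # placeholder, not the actual repo id
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModel.from_pretrained(repo)

# Register a dedicated pad token, as this commit's tokenizer files now declare,
# and make sure the embedding matrix has a row for it.
tokenizer.add_special_tokens({"pad_token": "<pad>"})
model.resize_token_embeddings(len(tokenizer))
```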
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:4a499fd307ebb77a86f6a84938c4bd727e699e35ea9c55d2a0139d41b74511ab
+oid sha256:a63dfdc47e8b387ef099ad7587a389f1b89c0336fb4de6bc94b0354187b16f0a
 size 4091