rymaju committed on
Commit 7132de0 · 1 Parent(s): ac213b0

Training in progress, step 500

config.json CHANGED
@@ -9,23 +9,19 @@
  "decoder_start_token_id": 0,
  "dense_act_fn": "relu",
  "dropout_rate": 0.1,
- "early_stopping": true,
  "eos_token_id": 1,
  "feed_forward_proj": "relu",
  "initializer_factor": 1.0,
  "is_encoder_decoder": true,
  "is_gated_act": false,
  "layer_norm_epsilon": 1e-06,
- "max_length": 300,
  "model_type": "t5",
  "n_positions": 512,
- "num_beams": 4,
  "num_decoder_layers": 6,
  "num_heads": 8,
  "num_layers": 6,
  "output_past": true,
  "pad_token_id": 0,
- "prefix": "translate English to German: ",
  "relative_attention_max_distance": 128,
  "relative_attention_num_buckets": 32,
  "task_specific_params": {
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:ad59bbce055db01c8e28249ca3fb5ff46e61abf468bdf7c5ba3324d59d5ffde7
+ oid sha256:5e1393fa21c429b0f623504f6e7865574bc9a8923eb88061574bd9cbb4b92f78
  size 242070267
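
pytorch_model.bin is tracked with Git LFS, so the diff shows only the pointer file; the new sha256 oid identifies the updated weights at step 500. A minimal sketch, assuming the real ~242 MB file has been pulled locally, of checking that it matches the pointer's oid (the expected hash is copied from this diff):

import hashlib

# Expected oid from the updated Git LFS pointer above.
EXPECTED = "5e1393fa21c429b0f623504f6e7865574bc9a8923eb88061574bd9cbb4b92f78"

# Hash the file in 1 MiB chunks so the full checkpoint never sits in memory.
digest = hashlib.sha256()
with open("pytorch_model.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert digest.hexdigest() == EXPECTED, "local weights do not match the LFS pointer"
print("sha256 matches the pointer oid")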
runs/Nov07_02-21-33_56224b23b126/1667787922.2229264/events.out.tfevents.1667787922.56224b23b126.77.9 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3c0c6f87413ebcb3e143855b9155ac3b5fe63a9d0d808d5eb0789ce8003349c5
+ size 5743
runs/Nov07_02-21-33_56224b23b126/events.out.tfevents.1667787922.56224b23b126.77.8 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:708d54259798387dd542954c290ae09e4605628997ee9016fc1e8dce3a76b2c8
+ size 4836
runs/Nov07_02-26-00_56224b23b126/1667787976.3028436/events.out.tfevents.1667787976.56224b23b126.77.11 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:595789bd20863c61fe2f183a857af507441ea7a1bc659a28cefeff8c5fdd8c65
+ size 5743
runs/Nov07_02-26-00_56224b23b126/events.out.tfevents.1667787976.56224b23b126.77.10 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4341290b29d2e7a4cc4463f22fa1f41bc2815e078d474d7bc35b7e393fda40fd
+ size 4883
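
The four ADDED files under runs/ are Git LFS pointers to TensorBoard event logs for this training run. A minimal sketch of inspecting one run directory with TensorBoard's Python event reader, assuming the LFS objects have been pulled; the directory name is taken from this commit, but which scalar tags it contains is not visible from the diff:

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# One of the run directories added in this commit.
acc = EventAccumulator("runs/Nov07_02-26-00_56224b23b126")
acc.Reload()

# List whatever scalar tags were logged (e.g. training loss), then dump the first one.
tags = acc.Tags()["scalars"]
print("scalar tags:", tags)
if tags:
    for event in acc.Scalars(tags[0]):
        print(event.step, event.value)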
tokenizer.json CHANGED
@@ -1,6 +1,11 @@
  {
  "version": "1.0",
- "truncation": null,
+ "truncation": {
+   "direction": "Right",
+   "max_length": 128,
+   "strategy": "LongestFirst",
+   "stride": 0
+ },
  "padding": null,
  "added_tokens": [
  {
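
The tokenizer change enables truncation in the serialized tokenizer state: "truncation" was previously null (disabled) and now cuts inputs to 128 tokens from the right, using the LongestFirst strategy with no stride. A minimal sketch of producing the same state with the tokenizers library; it loads the tokenizer.json from this repository, and right-side truncation is the library default rather than an explicit argument here:

from tokenizers import Tokenizer

# Load the serialized tokenizer that this commit modifies.
tok = Tokenizer.from_file("tokenizer.json")

# Equivalent of the new "truncation" block: cap inputs at 128 tokens,
# trimming the longest sequence of a pair first, with no overlapping stride.
tok.enable_truncation(max_length=128, stride=0, strategy="longest_first")

# Serializing again writes the same kind of "truncation" object back to disk.
tok.save("tokenizer.json")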
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:d77eb01a67c4bdbda53d42bce3ac957057e9bc83a0989b57b5017e3d76cb85f8
+ oid sha256:4d45f2fa49d12a66c3787cdb757fab96a279c622df44c51872d1da04c112ed39
  size 3567
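
training_args.bin is the Trainer's pickled TrainingArguments object, so the pointer diff shows only a new hash, not which settings changed. A minimal sketch of loading it to inspect the values, assuming the LFS object has been pulled; on recent PyTorch releases weights_only=False is needed because this is an arbitrary pickled object rather than plain tensors:

import torch

# Load the pickled TrainingArguments saved alongside the checkpoint.
# Older torch versions do not accept weights_only; drop the flag there.
args = torch.load("training_args.bin", weights_only=False)
print(args)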