anton-l (HF staff) committed
Commit a551342
1 Parent(s): 301b9ff

End of training

all_results.json ADDED
@@ -0,0 +1,32 @@
+ {
+     "epoch": 10.0,
+     "epoch_cs": 10.0,
+     "epoch_de": 10.0,
+     "epoch_en": 10.0,
+     "epoch_es": 10.0,
+     "epoch_fi": 10.0,
+     "epoch_fr": 10.0,
+     "epoch_hr": 10.0,
+     "epoch_hu": 10.0,
+     "epoch_it": 10.0,
+     "epoch_nl": 10.0,
+     "epoch_pl": 10.0,
+     "epoch_ro": 10.0,
+     "epoch_sk": 10.0,
+     "epoch_sl": 10.0,
+     "eval_cer": 0.09657029192146015,
+     "eval_cer_en": 0.09657029192146015,
+     "eval_loss": 0.3126685321331024,
+     "eval_loss_en": 0.3126685321331024,
+     "eval_runtime": 3.8804071428571434,
+     "eval_samples_per_second": 2.428785714285714,
+     "eval_steps_per_second": 0.3045714285714286,
+     "eval_wer": 0.15493465525011266,
+     "eval_wer_en": 0.15493465525011266,
+     "predict_samples": 1842,
+     "train_loss": 0.30801558772101284,
+     "train_runtime": 38596.418,
+     "train_samples": 166975,
+     "train_samples_per_second": 43.262,
+     "train_steps_per_second": 0.676
+ }
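
For context (not part of the committed files): eval_wer and eval_cer above are word- and character-error rates on the evaluation split. A minimal sketch of how such metrics can be computed with the Hugging Face evaluate library follows; the example strings are made up, and the library choice is an assumption, not necessarily what this training run used.

# Minimal sketch, assuming the `evaluate` library (with its jiwer backend) is installed.
# The prediction/reference strings are hypothetical placeholders.
import evaluate

wer_metric = evaluate.load("wer")
cer_metric = evaluate.load("cer")

predictions = ["hello word"]   # hypothetical decoded transcription
references  = ["hello world"]  # hypothetical ground-truth transcription

print("WER:", wer_metric.compute(predictions=predictions, references=references))
print("CER:", cer_metric.compute(predictions=predictions, references=references))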
config.json CHANGED
@@ -99,7 +99,7 @@
      1,
      1
    ],
-   "torch_dtype": "float32",
+   "torch_dtype": "float16",
    "transformers_version": "4.18.0.dev0",
    "use_weighted_layer_sum": false,
    "vocab_size": 31,
predict_results.json ADDED
@@ -0,0 +1,27 @@
+ {
+     "epoch": 10.0,
+     "epoch_cs": 10.0,
+     "epoch_de": 10.0,
+     "epoch_en": 10.0,
+     "epoch_es": 10.0,
+     "epoch_fi": 10.0,
+     "epoch_fr": 10.0,
+     "epoch_hr": 10.0,
+     "epoch_hu": 10.0,
+     "epoch_it": 10.0,
+     "epoch_nl": 10.0,
+     "epoch_pl": 10.0,
+     "epoch_ro": 10.0,
+     "epoch_sk": 10.0,
+     "epoch_sl": 10.0,
+     "eval_cer": 0.09657029192146015,
+     "eval_cer_en": 0.09657029192146015,
+     "eval_loss": 0.3126685321331024,
+     "eval_loss_en": 0.3126685321331024,
+     "eval_runtime": 3.8804071428571434,
+     "eval_samples_per_second": 2.428785714285714,
+     "eval_steps_per_second": 0.3045714285714286,
+     "eval_wer": 0.15493465525011266,
+     "eval_wer_en": 0.15493465525011266,
+     "predict_samples": 1842
+ }
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:b41e67fc9c8a9edc0b6b17e883ad7903aebcc1a69f98da3c7b237c30a044ce15
- size 1262053361
+ oid sha256:600f191dd530c3ac20ef204575b89eead93f3eaba38ca7d6213b61a85caa4211
+ size 631112369
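
The checkpoint shrinks from 1262053361 to 631112369 bytes, roughly half, which is consistent with the torch_dtype change from float32 to float16 in config.json above. A minimal sketch of loading such a half-precision checkpoint with Transformers follows; the AutoModelForCTC class and the local path are assumptions for illustration, not taken from this repository.

# Minimal sketch, assuming a CTC-style speech model; swap in the actual class/path for this repo.
import torch
from transformers import AutoModelForCTC

model = AutoModelForCTC.from_pretrained(
    "./path-to-this-checkpoint",   # hypothetical local path, not the real repo id
    torch_dtype=torch.float16,     # load the weights in the half precision they are stored in
)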
runs/Apr29_14-23-44_anton-xtreme-s/events.out.tfevents.1651297385.anton-xtreme-s.15092.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7d40611eadd48a05852bbeec42d7efff1e75f7aebc6679684ef6c0c34e0bdede
+ size 4706
train_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+     "epoch": 10.0,
+     "train_loss": 0.30801558772101284,
+     "train_runtime": 38596.418,
+     "train_samples": 166975,
+     "train_samples_per_second": 43.262,
+     "train_steps_per_second": 0.676
+ }
trainer_state.json ADDED
The diff for this file is too large to render. See raw diff