Tflatval committed on
Commit
ad6caee
1 Parent(s): 48dec73

Upload folder using huggingface_hub

Browse files
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ language_model/unigrams.txt filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,4 @@
+ {
+   "</s>": 33,
+   "<s>": 32
+ }
alphabet.json ADDED
@@ -0,0 +1 @@
+ {"labels": [" ", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", "\u00e5", "\u00e6", "\u00f8", "\u2047", "", "<s>", "</s>"], "is_bpe": false}
config.json ADDED
@@ -0,0 +1,116 @@
+ {
+   "_name_or_path": "NbAiLab/nb-wav2vec2-300m-bokmaal",
+   "activation_dropout": 0.055,
+   "adapter_attn_dim": null,
+   "adapter_kernel_size": 3,
+   "adapter_stride": 2,
+   "add_adapter": false,
+   "apply_spec_augment": true,
+   "architectures": [
+     "Wav2Vec2ForCTC"
+   ],
+   "attention_dropout": 0.094,
+   "bos_token_id": 1,
+   "classifier_proj_size": 256,
+   "codevector_dim": 768,
+   "contrastive_logits_temperature": 0.1,
+   "conv_bias": true,
+   "conv_dim": [
+     512,
+     512,
+     512,
+     512,
+     512,
+     512,
+     512
+   ],
+   "conv_kernel": [
+     10,
+     3,
+     3,
+     3,
+     3,
+     2,
+     2
+   ],
+   "conv_stride": [
+     5,
+     2,
+     2,
+     2,
+     2,
+     2,
+     2
+   ],
+   "ctc_loss_reduction": "mean",
+   "ctc_zero_infinity": true,
+   "diversity_loss_weight": 0.1,
+   "do_stable_layer_norm": true,
+   "eos_token_id": 2,
+   "feat_extract_activation": "gelu",
+   "feat_extract_dropout": 0.0,
+   "feat_extract_norm": "layer",
+   "feat_proj_dropout": 0.04,
+   "feat_quantizer_dropout": 0.0,
+   "final_dropout": 0.0,
+   "hidden_act": "gelu",
+   "hidden_dropout": 0.047,
+   "hidden_size": 1024,
+   "initializer_range": 0.02,
+   "intermediate_size": 4096,
+   "layer_norm_eps": 1e-05,
+   "layerdrop": 0.041,
+   "mask_channel_length": 10,
+   "mask_channel_min_space": 1,
+   "mask_channel_other": 0.0,
+   "mask_channel_prob": 0.0,
+   "mask_channel_selection": "static",
+   "mask_feature_length": 64,
+   "mask_feature_min_masks": 0,
+   "mask_feature_prob": 0.25,
+   "mask_time_length": 10,
+   "mask_time_min_masks": 2,
+   "mask_time_min_space": 1,
+   "mask_time_other": 0.0,
+   "mask_time_prob": 0.082,
+   "mask_time_selection": "static",
+   "model_type": "wav2vec2",
+   "num_adapter_layers": 3,
+   "num_attention_heads": 16,
+   "num_codevector_groups": 2,
+   "num_codevectors_per_group": 320,
+   "num_conv_pos_embedding_groups": 16,
+   "num_conv_pos_embeddings": 128,
+   "num_feat_extract_layers": 7,
+   "num_hidden_layers": 24,
+   "num_negatives": 100,
+   "output_hidden_size": 1024,
+   "pad_token_id": 31,
+   "proj_codevector_dim": 768,
+   "tdnn_dilation": [
+     1,
+     2,
+     3,
+     1,
+     1
+   ],
+   "tdnn_dim": [
+     512,
+     512,
+     512,
+     512,
+     1500
+   ],
+   "tdnn_kernel": [
+     5,
+     3,
+     3,
+     1,
+     1
+   ],
+   "torch_dtype": "float32",
+   "transformers_version": "4.38.1",
+   "use_weighted_layer_sum": false,
+   "vocab_size": 34,
+   "xvector_output_dim": 512
+ }
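
Taken together, config.json describes a roughly 300M-parameter Wav2Vec2ForCTC acoustic model (24 transformer layers, hidden size 1024, 34-entry CTC vocabulary) fine-tuned from NbAiLab/nb-wav2vec2-300m-bokmaal. A minimal sketch of inspecting the uploaded config with transformers; the local checkpoint path is an assumption:

import transformers

# Path is a placeholder for wherever this upload is checked out locally.
config = transformers.Wav2Vec2Config.from_pretrained("./checkpoint-48581")

print(config.architectures)      # ['Wav2Vec2ForCTC']
print(config.num_hidden_layers)  # 24
print(config.hidden_size)        # 1024
print(config.vocab_size)         # 34 (vocab.json entries plus <s>/</s>)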
language_model/5gram.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7b41c24c63f2f0585bea83666369593f3b3e6d047f327a90f36ebca2c35ef0ff
+ size 4243671427
language_model/attrs.json ADDED
@@ -0,0 +1 @@
+ {"alpha": 0.5, "beta": 0.1, "unk_score_offset": -10.0, "score_boundary": true}
language_model/unigrams.txt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ac3e71ca49838ca355df6fdcb8d89344a5a9bf9e1a76587cdf5df1367c19b9a9
+ size 16759269
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6e8d14bbeae7e2299e1cb1bec7d33a5c504123b2c8baef1220328655a4dc8da8
+ size 1261946880
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:61eb088da69d356d7c29968d2b07d23431f2717504acc3ce5c60e1756c4e6abc
+ size 2490438582
preprocessor_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "do_normalize": true,
+   "feature_extractor_type": "Wav2Vec2FeatureExtractor",
+   "feature_size": 1,
+   "padding_side": "right",
+   "padding_value": 0,
+   "processor_class": "Wav2Vec2ProcessorWithLM",
+   "return_attention_mask": true,
+   "sampling_rate": 16000
+ }
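
preprocessor_config.json ties the pieces together: audio is expected as normalized mono 16 kHz input, and the processor class is Wav2Vec2ProcessorWithLM, so transformers loads the feature extractor, the CTC tokenizer and the KenLM beam-search decoder as one object. A minimal, illustrative transcription sketch; the checkpoint path and the audio file name are assumptions:

import torch
import soundfile as sf
from transformers import Wav2Vec2ForCTC, Wav2Vec2ProcessorWithLM

ckpt = "./checkpoint-48581"  # placeholder path to this upload
processor = Wav2Vec2ProcessorWithLM.from_pretrained(ckpt)
model = Wav2Vec2ForCTC.from_pretrained(ckpt)

speech, sr = sf.read("audio.wav")  # must match "sampling_rate": 16000
inputs = processor(speech, sampling_rate=sr, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits

# The LM-boosted beam search uses 5gram.bin, unigrams.txt and attrs.json under the hood.
text = processor.batch_decode(logits.numpy()).text[0]
print(text)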
rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a85c1aaaa76a5671c8e58e092209ee3f942513dde6a8493494907d0ec004f3ce
+ size 14308
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aa83df6e6421d27820c9d0450307f066ab9a9503c6227f93590f6a71aae78a3e
+ size 1064
special_tokens_map.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "bos_token": "<s>",
+   "eos_token": "</s>",
+   "pad_token": "[PAD]",
+   "unk_token": "[UNK]"
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,47 @@
+ {
+   "added_tokens_decoder": {
+     "30": {
+       "content": "[UNK]",
+       "lstrip": true,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": false
+     },
+     "31": {
+       "content": "[PAD]",
+       "lstrip": true,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": false
+     },
+     "32": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "33": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<s>",
+   "clean_up_tokenization_spaces": true,
+   "do_lower_case": false,
+   "eos_token": "</s>",
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "[PAD]",
+   "replace_word_delimiter_char": " ",
+   "target_lang": null,
+   "tokenizer_class": "Wav2Vec2CTCTokenizer",
+   "unk_token": "[UNK]",
+   "word_delimiter_token": "|"
+ }
trainer_state.json ADDED
@@ -0,0 +1,1612 @@
1
+ {
2
+ "best_metric": 0.14368463395012068,
3
+ "best_model_checkpoint": "/cluster/home/torstefl/Master/saved_model/W2V/Combined/NB-RUND/22.05/checkpoint-48581",
4
+ "epoch": 37.0,
5
+ "eval_steps": 500,
6
+ "global_step": 48581,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 1.0,
13
+ "grad_norm": 3.1965060234069824,
14
+ "learning_rate": 8.740000000000001e-05,
15
+ "loss": 0.5607,
16
+ "step": 1313
17
+ },
18
+ {
19
+ "epoch": 1.0,
20
+ "eval_bigbrother_loss": 1.985207200050354,
21
+ "eval_bigbrother_runtime": 42.9617,
22
+ "eval_bigbrother_samples_per_second": 31.889,
23
+ "eval_bigbrother_steps_per_second": 1.001,
24
+ "eval_bigbrother_wer": 0.5811649077579957,
25
+ "step": 1313
26
+ },
27
+ {
28
+ "epoch": 1.0,
29
+ "eval_NB_RUND_loss": 0.3991325795650482,
30
+ "eval_NB_RUND_runtime": 118.1565,
31
+ "eval_NB_RUND_samples_per_second": 15.894,
32
+ "eval_NB_RUND_steps_per_second": 0.499,
33
+ "eval_NB_RUND_wer": 0.21319388576025744,
34
+ "step": 1313
35
+ },
36
+ {
37
+ "epoch": 1.0,
38
+ "eval_rundkast_loss": 0.3412451446056366,
39
+ "eval_rundkast_runtime": 33.4748,
40
+ "eval_rundkast_samples_per_second": 40.18,
41
+ "eval_rundkast_steps_per_second": 1.285,
42
+ "eval_rundkast_wer": 0.1870824053452116,
43
+ "step": 1313
44
+ },
45
+ {
46
+ "epoch": 1.0,
47
+ "eval_nb_samtale_loss": 0.5455772876739502,
48
+ "eval_nb_samtale_runtime": 38.7172,
49
+ "eval_nb_samtale_samples_per_second": 13.766,
50
+ "eval_nb_samtale_steps_per_second": 0.439,
51
+ "eval_nb_samtale_wer": 0.23990885416666666,
52
+ "step": 1313
53
+ },
54
+ {
55
+ "epoch": 2.0,
56
+ "grad_norm": 4.949782371520996,
57
+ "learning_rate": 9.779694237553901e-05,
58
+ "loss": 0.4692,
59
+ "step": 2626
60
+ },
61
+ {
62
+ "epoch": 2.0,
63
+ "eval_bigbrother_loss": 1.9343265295028687,
64
+ "eval_bigbrother_runtime": 41.3509,
65
+ "eval_bigbrother_samples_per_second": 33.131,
66
+ "eval_bigbrother_steps_per_second": 1.04,
67
+ "eval_bigbrother_wer": 0.5653750100700878,
68
+ "step": 2626
69
+ },
70
+ {
71
+ "epoch": 2.0,
72
+ "eval_NB_RUND_loss": 0.3552016317844391,
73
+ "eval_NB_RUND_runtime": 86.1673,
74
+ "eval_NB_RUND_samples_per_second": 21.795,
75
+ "eval_NB_RUND_steps_per_second": 0.685,
76
+ "eval_NB_RUND_wer": 0.1984714400643604,
77
+ "step": 2626
78
+ },
79
+ {
80
+ "epoch": 2.0,
81
+ "eval_rundkast_loss": 0.29250362515449524,
82
+ "eval_rundkast_runtime": 33.3182,
83
+ "eval_rundkast_samples_per_second": 40.368,
84
+ "eval_rundkast_steps_per_second": 1.291,
85
+ "eval_rundkast_wer": 0.17181037225580656,
86
+ "step": 2626
87
+ },
88
+ {
89
+ "epoch": 2.0,
90
+ "eval_nb_samtale_loss": 0.5133547782897949,
91
+ "eval_nb_samtale_runtime": 37.6446,
92
+ "eval_nb_samtale_samples_per_second": 14.159,
93
+ "eval_nb_samtale_steps_per_second": 0.452,
94
+ "eval_nb_samtale_wer": 0.224609375,
95
+ "step": 2626
96
+ },
97
+ {
98
+ "epoch": 3.0,
99
+ "grad_norm": 2.300737142562866,
100
+ "learning_rate": 9.522344178753431e-05,
101
+ "loss": 0.4264,
102
+ "step": 3939
103
+ },
104
+ {
105
+ "epoch": 3.0,
106
+ "eval_bigbrother_loss": 1.831033706665039,
107
+ "eval_bigbrother_runtime": 41.3456,
108
+ "eval_bigbrother_samples_per_second": 33.135,
109
+ "eval_bigbrother_steps_per_second": 1.04,
110
+ "eval_bigbrother_wer": 0.5553049222589221,
111
+ "step": 3939
112
+ },
113
+ {
114
+ "epoch": 3.0,
115
+ "eval_NB_RUND_loss": 0.33576926589012146,
116
+ "eval_NB_RUND_runtime": 85.9654,
117
+ "eval_NB_RUND_samples_per_second": 21.846,
118
+ "eval_NB_RUND_steps_per_second": 0.686,
119
+ "eval_NB_RUND_wer": 0.19296057924376508,
120
+ "step": 3939
121
+ },
122
+ {
123
+ "epoch": 3.0,
124
+ "eval_rundkast_loss": 0.2803370952606201,
125
+ "eval_rundkast_runtime": 33.2875,
126
+ "eval_rundkast_samples_per_second": 40.406,
127
+ "eval_rundkast_steps_per_second": 1.292,
128
+ "eval_rundkast_wer": 0.16457206490614062,
129
+ "step": 3939
130
+ },
131
+ {
132
+ "epoch": 3.0,
133
+ "eval_nb_samtale_loss": 0.47577109932899475,
134
+ "eval_nb_samtale_runtime": 37.7954,
135
+ "eval_nb_samtale_samples_per_second": 14.102,
136
+ "eval_nb_samtale_steps_per_second": 0.45,
137
+ "eval_nb_samtale_wer": 0.22102864583333334,
138
+ "step": 3939
139
+ },
140
+ {
141
+ "epoch": 4.0,
142
+ "grad_norm": 4.673075199127197,
143
+ "learning_rate": 9.26499411995296e-05,
144
+ "loss": 0.397,
145
+ "step": 5252
146
+ },
147
+ {
148
+ "epoch": 4.0,
149
+ "eval_bigbrother_loss": 1.7913014888763428,
150
+ "eval_bigbrother_runtime": 41.2837,
151
+ "eval_bigbrother_samples_per_second": 33.185,
152
+ "eval_bigbrother_steps_per_second": 1.042,
153
+ "eval_bigbrother_wer": 0.5413679207282688,
154
+ "step": 5252
155
+ },
156
+ {
157
+ "epoch": 4.0,
158
+ "eval_NB_RUND_loss": 0.3278275728225708,
159
+ "eval_NB_RUND_runtime": 85.8041,
160
+ "eval_NB_RUND_samples_per_second": 21.887,
161
+ "eval_NB_RUND_steps_per_second": 0.688,
162
+ "eval_NB_RUND_wer": 0.1829847144006436,
163
+ "step": 5252
164
+ },
165
+ {
166
+ "epoch": 4.0,
167
+ "eval_rundkast_loss": 0.2707855999469757,
168
+ "eval_rundkast_runtime": 33.1752,
169
+ "eval_rundkast_samples_per_second": 40.542,
170
+ "eval_rundkast_steps_per_second": 1.296,
171
+ "eval_rundkast_wer": 0.15431116767419661,
172
+ "step": 5252
173
+ },
174
+ {
175
+ "epoch": 4.0,
176
+ "eval_nb_samtale_loss": 0.47150808572769165,
177
+ "eval_nb_samtale_runtime": 37.7895,
178
+ "eval_nb_samtale_samples_per_second": 14.104,
179
+ "eval_nb_samtale_steps_per_second": 0.45,
180
+ "eval_nb_samtale_wer": 0.21158854166666666,
181
+ "step": 5252
182
+ },
183
+ {
184
+ "epoch": 5.0,
185
+ "grad_norm": 2.968538999557495,
186
+ "learning_rate": 9.00764406115249e-05,
187
+ "loss": 0.3742,
188
+ "step": 6565
189
+ },
190
+ {
191
+ "epoch": 5.0,
192
+ "eval_bigbrother_loss": 1.9187724590301514,
193
+ "eval_bigbrother_runtime": 41.3465,
194
+ "eval_bigbrother_samples_per_second": 33.135,
195
+ "eval_bigbrother_steps_per_second": 1.04,
196
+ "eval_bigbrother_wer": 0.5373398856038024,
197
+ "step": 6565
198
+ },
199
+ {
200
+ "epoch": 5.0,
201
+ "eval_NB_RUND_loss": 0.3254011571407318,
202
+ "eval_NB_RUND_runtime": 85.8831,
203
+ "eval_NB_RUND_samples_per_second": 21.867,
204
+ "eval_NB_RUND_steps_per_second": 0.687,
205
+ "eval_NB_RUND_wer": 0.17666934835076428,
206
+ "step": 6565
207
+ },
208
+ {
209
+ "epoch": 5.0,
210
+ "eval_rundkast_loss": 0.2743120491504669,
211
+ "eval_rundkast_runtime": 32.9681,
212
+ "eval_rundkast_samples_per_second": 40.797,
213
+ "eval_rundkast_steps_per_second": 1.304,
214
+ "eval_rundkast_wer": 0.14937957365574292,
215
+ "step": 6565
216
+ },
217
+ {
218
+ "epoch": 5.0,
219
+ "eval_nb_samtale_loss": 0.453761488199234,
220
+ "eval_nb_samtale_runtime": 38.2375,
221
+ "eval_nb_samtale_samples_per_second": 13.939,
222
+ "eval_nb_samtale_steps_per_second": 0.445,
223
+ "eval_nb_samtale_wer": 0.20475260416666666,
224
+ "step": 6565
225
+ },
226
+ {
227
+ "epoch": 6.0,
228
+ "grad_norm": 2.727647304534912,
229
+ "learning_rate": 8.750490003920032e-05,
230
+ "loss": 0.3559,
231
+ "step": 7878
232
+ },
233
+ {
234
+ "epoch": 6.0,
235
+ "eval_bigbrother_loss": 1.8001114130020142,
236
+ "eval_bigbrother_runtime": 41.331,
237
+ "eval_bigbrother_samples_per_second": 33.147,
238
+ "eval_bigbrother_steps_per_second": 1.04,
239
+ "eval_bigbrother_wer": 0.5302505437847418,
240
+ "step": 7878
241
+ },
242
+ {
243
+ "epoch": 6.0,
244
+ "eval_NB_RUND_loss": 0.31470128893852234,
245
+ "eval_NB_RUND_runtime": 85.2967,
246
+ "eval_NB_RUND_samples_per_second": 22.017,
247
+ "eval_NB_RUND_steps_per_second": 0.692,
248
+ "eval_NB_RUND_wer": 0.1740949316170555,
249
+ "step": 7878
250
+ },
251
+ {
252
+ "epoch": 6.0,
253
+ "eval_rundkast_loss": 0.26562032103538513,
254
+ "eval_rundkast_runtime": 33.1186,
255
+ "eval_rundkast_samples_per_second": 40.612,
256
+ "eval_rundkast_steps_per_second": 1.298,
257
+ "eval_rundkast_wer": 0.14802736239261852,
258
+ "step": 7878
259
+ },
260
+ {
261
+ "epoch": 6.0,
262
+ "eval_nb_samtale_loss": 0.43801799416542053,
263
+ "eval_nb_samtale_runtime": 37.7602,
264
+ "eval_nb_samtale_samples_per_second": 14.115,
265
+ "eval_nb_samtale_steps_per_second": 0.45,
266
+ "eval_nb_samtale_wer": 0.20100911458333334,
267
+ "step": 7878
268
+ },
269
+ {
270
+ "epoch": 7.0,
271
+ "grad_norm": 4.560679912567139,
272
+ "learning_rate": 8.493139945119561e-05,
273
+ "loss": 0.3425,
274
+ "step": 9191
275
+ },
276
+ {
277
+ "epoch": 7.0,
278
+ "eval_bigbrother_loss": 1.8266634941101074,
279
+ "eval_bigbrother_runtime": 41.178,
280
+ "eval_bigbrother_samples_per_second": 33.27,
281
+ "eval_bigbrother_steps_per_second": 1.044,
282
+ "eval_bigbrother_wer": 0.5329090469668896,
283
+ "step": 9191
284
+ },
285
+ {
286
+ "epoch": 7.0,
287
+ "eval_NB_RUND_loss": 0.3031991124153137,
288
+ "eval_NB_RUND_runtime": 85.2734,
289
+ "eval_NB_RUND_samples_per_second": 22.023,
290
+ "eval_NB_RUND_steps_per_second": 0.692,
291
+ "eval_NB_RUND_wer": 0.168141592920354,
292
+ "step": 9191
293
+ },
294
+ {
295
+ "epoch": 7.0,
296
+ "eval_rundkast_loss": 0.2632894814014435,
297
+ "eval_rundkast_runtime": 33.1093,
298
+ "eval_rundkast_samples_per_second": 40.623,
299
+ "eval_rundkast_steps_per_second": 1.299,
300
+ "eval_rundkast_wer": 0.14007317849188675,
301
+ "step": 9191
302
+ },
303
+ {
304
+ "epoch": 7.0,
305
+ "eval_nb_samtale_loss": 0.4041052460670471,
306
+ "eval_nb_samtale_runtime": 37.7437,
307
+ "eval_nb_samtale_samples_per_second": 14.122,
308
+ "eval_nb_samtale_steps_per_second": 0.45,
309
+ "eval_nb_samtale_wer": 0.1962890625,
310
+ "step": 9191
311
+ },
312
+ {
313
+ "epoch": 8.0,
314
+ "grad_norm": 2.754389524459839,
315
+ "learning_rate": 8.235985887887103e-05,
316
+ "loss": 0.3264,
317
+ "step": 10504
318
+ },
319
+ {
320
+ "epoch": 8.0,
321
+ "eval_bigbrother_loss": 1.930435299873352,
322
+ "eval_bigbrother_runtime": 41.8539,
323
+ "eval_bigbrother_samples_per_second": 32.733,
324
+ "eval_bigbrother_steps_per_second": 1.027,
325
+ "eval_bigbrother_wer": 0.5231612019656812,
326
+ "step": 10504
327
+ },
328
+ {
329
+ "epoch": 8.0,
330
+ "eval_NB_RUND_loss": 0.3151450455188751,
331
+ "eval_NB_RUND_runtime": 106.9286,
332
+ "eval_NB_RUND_samples_per_second": 17.563,
333
+ "eval_NB_RUND_steps_per_second": 0.552,
334
+ "eval_NB_RUND_wer": 0.16790024135156878,
335
+ "step": 10504
336
+ },
337
+ {
338
+ "epoch": 8.0,
339
+ "eval_rundkast_loss": 0.2726036012172699,
340
+ "eval_rundkast_runtime": 37.1374,
341
+ "eval_rundkast_samples_per_second": 36.217,
342
+ "eval_rundkast_steps_per_second": 1.158,
343
+ "eval_rundkast_wer": 0.1422208081450843,
344
+ "step": 10504
345
+ },
346
+ {
347
+ "epoch": 8.0,
348
+ "eval_nb_samtale_loss": 0.422342449426651,
349
+ "eval_nb_samtale_runtime": 37.6557,
350
+ "eval_nb_samtale_samples_per_second": 14.155,
351
+ "eval_nb_samtale_steps_per_second": 0.451,
352
+ "eval_nb_samtale_wer": 0.19490559895833334,
353
+ "step": 10504
354
+ },
355
+ {
356
+ "epoch": 9.0,
357
+ "grad_norm": 1.9258495569229126,
358
+ "learning_rate": 7.978635829086632e-05,
359
+ "loss": 0.3126,
360
+ "step": 11817
361
+ },
362
+ {
363
+ "epoch": 9.0,
364
+ "eval_bigbrother_loss": 1.8571630716323853,
365
+ "eval_bigbrother_runtime": 41.3864,
366
+ "eval_bigbrother_samples_per_second": 33.103,
367
+ "eval_bigbrother_steps_per_second": 1.039,
368
+ "eval_bigbrother_wer": 0.524611294610489,
369
+ "step": 11817
370
+ },
371
+ {
372
+ "epoch": 9.0,
373
+ "eval_NB_RUND_loss": 0.3117910325527191,
374
+ "eval_NB_RUND_runtime": 86.1461,
375
+ "eval_NB_RUND_samples_per_second": 21.8,
376
+ "eval_NB_RUND_steps_per_second": 0.685,
377
+ "eval_NB_RUND_wer": 0.16287208366854383,
378
+ "step": 11817
379
+ },
380
+ {
381
+ "epoch": 9.0,
382
+ "eval_rundkast_loss": 0.27052268385887146,
383
+ "eval_rundkast_runtime": 33.2454,
384
+ "eval_rundkast_samples_per_second": 40.457,
385
+ "eval_rundkast_steps_per_second": 1.293,
386
+ "eval_rundkast_wer": 0.1399140948138721,
387
+ "step": 11817
388
+ },
389
+ {
390
+ "epoch": 9.0,
391
+ "eval_nb_samtale_loss": 0.41521695256233215,
392
+ "eval_nb_samtale_runtime": 37.6049,
393
+ "eval_nb_samtale_samples_per_second": 14.174,
394
+ "eval_nb_samtale_steps_per_second": 0.452,
395
+ "eval_nb_samtale_wer": 0.18636067708333334,
396
+ "step": 11817
397
+ },
398
+ {
399
+ "epoch": 10.0,
400
+ "grad_norm": 4.801841735839844,
401
+ "learning_rate": 7.721285770286163e-05,
402
+ "loss": 0.3009,
403
+ "step": 13130
404
+ },
405
+ {
406
+ "epoch": 10.0,
407
+ "eval_bigbrother_loss": 1.9679045677185059,
408
+ "eval_bigbrother_runtime": 41.3496,
409
+ "eval_bigbrother_samples_per_second": 33.132,
410
+ "eval_bigbrother_steps_per_second": 1.04,
411
+ "eval_bigbrother_wer": 0.5192942882461935,
412
+ "step": 13130
413
+ },
414
+ {
415
+ "epoch": 10.0,
416
+ "eval_NB_RUND_loss": 0.31853994727134705,
417
+ "eval_NB_RUND_runtime": 85.1737,
418
+ "eval_NB_RUND_samples_per_second": 22.049,
419
+ "eval_NB_RUND_steps_per_second": 0.693,
420
+ "eval_NB_RUND_wer": 0.16182622687047465,
421
+ "step": 13130
422
+ },
423
+ {
424
+ "epoch": 10.0,
425
+ "eval_rundkast_loss": 0.2728247046470642,
426
+ "eval_rundkast_runtime": 32.73,
427
+ "eval_rundkast_samples_per_second": 41.094,
428
+ "eval_rundkast_steps_per_second": 1.314,
429
+ "eval_rundkast_wer": 0.13880050906776964,
430
+ "step": 13130
431
+ },
432
+ {
433
+ "epoch": 10.0,
434
+ "eval_nb_samtale_loss": 0.4334540069103241,
435
+ "eval_nb_samtale_runtime": 37.2959,
436
+ "eval_nb_samtale_samples_per_second": 14.291,
437
+ "eval_nb_samtale_steps_per_second": 0.456,
438
+ "eval_nb_samtale_wer": 0.185302734375,
439
+ "step": 13130
440
+ },
441
+ {
442
+ "epoch": 11.0,
443
+ "grad_norm": 2.734304428100586,
444
+ "learning_rate": 7.464131713053705e-05,
445
+ "loss": 0.2911,
446
+ "step": 14443
447
+ },
448
+ {
449
+ "epoch": 11.0,
450
+ "eval_bigbrother_loss": 2.009200096130371,
451
+ "eval_bigbrother_runtime": 41.4463,
452
+ "eval_bigbrother_samples_per_second": 33.055,
453
+ "eval_bigbrother_steps_per_second": 1.037,
454
+ "eval_bigbrother_wer": 0.5190526061387255,
455
+ "step": 14443
456
+ },
457
+ {
458
+ "epoch": 11.0,
459
+ "eval_NB_RUND_loss": 0.3390868902206421,
460
+ "eval_NB_RUND_runtime": 85.2442,
461
+ "eval_NB_RUND_samples_per_second": 22.031,
462
+ "eval_NB_RUND_steps_per_second": 0.692,
463
+ "eval_NB_RUND_wer": 0.16061946902654867,
464
+ "step": 14443
465
+ },
466
+ {
467
+ "epoch": 11.0,
468
+ "eval_rundkast_loss": 0.3032366931438446,
469
+ "eval_rundkast_runtime": 33.1436,
470
+ "eval_rundkast_samples_per_second": 40.581,
471
+ "eval_rundkast_steps_per_second": 1.297,
472
+ "eval_rundkast_wer": 0.1363347120585428,
473
+ "step": 14443
474
+ },
475
+ {
476
+ "epoch": 11.0,
477
+ "eval_nb_samtale_loss": 0.4296092391014099,
478
+ "eval_nb_samtale_runtime": 37.8422,
479
+ "eval_nb_samtale_samples_per_second": 14.085,
480
+ "eval_nb_samtale_steps_per_second": 0.449,
481
+ "eval_nb_samtale_wer": 0.184814453125,
482
+ "step": 14443
483
+ },
484
+ {
485
+ "epoch": 12.0,
486
+ "grad_norm": 3.582427740097046,
487
+ "learning_rate": 7.206781654253235e-05,
488
+ "loss": 0.2795,
489
+ "step": 15756
490
+ },
491
+ {
492
+ "epoch": 12.0,
493
+ "eval_bigbrother_loss": 2.00494122505188,
494
+ "eval_bigbrother_runtime": 41.5443,
495
+ "eval_bigbrother_samples_per_second": 32.977,
496
+ "eval_bigbrother_steps_per_second": 1.035,
497
+ "eval_bigbrother_wer": 0.5176025134939176,
498
+ "step": 15756
499
+ },
500
+ {
501
+ "epoch": 12.0,
502
+ "eval_NB_RUND_loss": 0.3187481760978699,
503
+ "eval_NB_RUND_runtime": 85.8022,
504
+ "eval_NB_RUND_samples_per_second": 21.888,
505
+ "eval_NB_RUND_steps_per_second": 0.688,
506
+ "eval_NB_RUND_wer": 0.15901045856798068,
507
+ "step": 15756
508
+ },
509
+ {
510
+ "epoch": 12.0,
511
+ "eval_rundkast_loss": 0.2752975821495056,
512
+ "eval_rundkast_runtime": 33.305,
513
+ "eval_rundkast_samples_per_second": 40.384,
514
+ "eval_rundkast_steps_per_second": 1.291,
515
+ "eval_rundkast_wer": 0.1350620426344257,
516
+ "step": 15756
517
+ },
518
+ {
519
+ "epoch": 12.0,
520
+ "eval_nb_samtale_loss": 0.42811375856399536,
521
+ "eval_nb_samtale_runtime": 37.713,
522
+ "eval_nb_samtale_samples_per_second": 14.133,
523
+ "eval_nb_samtale_steps_per_second": 0.451,
524
+ "eval_nb_samtale_wer": 0.18310546875,
525
+ "step": 15756
526
+ },
527
+ {
528
+ "epoch": 13.0,
529
+ "grad_norm": 4.836462020874023,
530
+ "learning_rate": 6.94982359858879e-05,
531
+ "loss": 0.2712,
532
+ "step": 17069
533
+ },
534
+ {
535
+ "epoch": 13.0,
536
+ "eval_bigbrother_loss": 1.9352320432662964,
537
+ "eval_bigbrother_runtime": 41.182,
538
+ "eval_bigbrother_samples_per_second": 33.267,
539
+ "eval_bigbrother_steps_per_second": 1.044,
540
+ "eval_bigbrother_wer": 0.5195359703536615,
541
+ "step": 17069
542
+ },
543
+ {
544
+ "epoch": 13.0,
545
+ "eval_NB_RUND_loss": 0.305833101272583,
546
+ "eval_NB_RUND_runtime": 84.7929,
547
+ "eval_NB_RUND_samples_per_second": 22.148,
548
+ "eval_NB_RUND_steps_per_second": 0.696,
549
+ "eval_NB_RUND_wer": 0.1578037007240547,
550
+ "step": 17069
551
+ },
552
+ {
553
+ "epoch": 13.0,
554
+ "eval_rundkast_loss": 0.26513412594795227,
555
+ "eval_rundkast_runtime": 32.9292,
556
+ "eval_rundkast_samples_per_second": 40.845,
557
+ "eval_rundkast_steps_per_second": 1.306,
558
+ "eval_rundkast_wer": 0.13434616608335984,
559
+ "step": 17069
560
+ },
561
+ {
562
+ "epoch": 13.0,
563
+ "eval_nb_samtale_loss": 0.4084182679653168,
564
+ "eval_nb_samtale_runtime": 37.2213,
565
+ "eval_nb_samtale_samples_per_second": 14.32,
566
+ "eval_nb_samtale_steps_per_second": 0.457,
567
+ "eval_nb_samtale_wer": 0.18131510416666666,
568
+ "step": 17069
569
+ },
570
+ {
571
+ "epoch": 14.0,
572
+ "grad_norm": 3.973949909210205,
573
+ "learning_rate": 6.692473539788319e-05,
574
+ "loss": 0.2655,
575
+ "step": 18382
576
+ },
577
+ {
578
+ "epoch": 14.0,
579
+ "eval_bigbrother_loss": 1.9412481784820557,
580
+ "eval_bigbrother_runtime": 41.0966,
581
+ "eval_bigbrother_samples_per_second": 33.336,
582
+ "eval_bigbrother_steps_per_second": 1.046,
583
+ "eval_bigbrother_wer": 0.5118021429146862,
584
+ "step": 18382
585
+ },
586
+ {
587
+ "epoch": 14.0,
588
+ "eval_NB_RUND_loss": 0.3095969557762146,
589
+ "eval_NB_RUND_runtime": 84.8936,
590
+ "eval_NB_RUND_samples_per_second": 22.122,
591
+ "eval_NB_RUND_steps_per_second": 0.695,
592
+ "eval_NB_RUND_wer": 0.15530973451327434,
593
+ "step": 18382
594
+ },
595
+ {
596
+ "epoch": 14.0,
597
+ "eval_rundkast_loss": 0.2749696373939514,
598
+ "eval_rundkast_runtime": 33.0503,
599
+ "eval_rundkast_samples_per_second": 40.696,
600
+ "eval_rundkast_steps_per_second": 1.301,
601
+ "eval_rundkast_wer": 0.1327553293032135,
602
+ "step": 18382
603
+ },
604
+ {
605
+ "epoch": 14.0,
606
+ "eval_nb_samtale_loss": 0.395680695772171,
607
+ "eval_nb_samtale_runtime": 37.1829,
608
+ "eval_nb_samtale_samples_per_second": 14.335,
609
+ "eval_nb_samtale_steps_per_second": 0.457,
610
+ "eval_nb_samtale_wer": 0.17789713541666666,
611
+ "step": 18382
612
+ },
613
+ {
614
+ "epoch": 15.0,
615
+ "grad_norm": 3.9156057834625244,
616
+ "learning_rate": 6.435123480987849e-05,
617
+ "loss": 0.2557,
618
+ "step": 19695
619
+ },
620
+ {
621
+ "epoch": 15.0,
622
+ "eval_bigbrother_loss": 1.9280781745910645,
623
+ "eval_bigbrother_runtime": 42.2614,
624
+ "eval_bigbrother_samples_per_second": 32.417,
625
+ "eval_bigbrother_steps_per_second": 1.017,
626
+ "eval_bigbrother_wer": 0.5109159751873036,
627
+ "step": 19695
628
+ },
629
+ {
630
+ "epoch": 15.0,
631
+ "eval_NB_RUND_loss": 0.2862784266471863,
632
+ "eval_NB_RUND_runtime": 85.5508,
633
+ "eval_NB_RUND_samples_per_second": 21.952,
634
+ "eval_NB_RUND_steps_per_second": 0.69,
635
+ "eval_NB_RUND_wer": 0.1563958165728077,
636
+ "step": 19695
637
+ },
638
+ {
639
+ "epoch": 15.0,
640
+ "eval_rundkast_loss": 0.2504226267337799,
641
+ "eval_rundkast_runtime": 33.4378,
642
+ "eval_rundkast_samples_per_second": 40.224,
643
+ "eval_rundkast_steps_per_second": 1.286,
644
+ "eval_rundkast_wer": 0.13426662424435254,
645
+ "step": 19695
646
+ },
647
+ {
648
+ "epoch": 15.0,
649
+ "eval_nb_samtale_loss": 0.37682539224624634,
650
+ "eval_nb_samtale_runtime": 37.3809,
651
+ "eval_nb_samtale_samples_per_second": 14.259,
652
+ "eval_nb_samtale_steps_per_second": 0.455,
653
+ "eval_nb_samtale_wer": 0.17887369791666666,
654
+ "step": 19695
655
+ },
656
+ {
657
+ "epoch": 16.0,
658
+ "grad_norm": 4.767539024353027,
659
+ "learning_rate": 6.17796942375539e-05,
660
+ "loss": 0.2497,
661
+ "step": 21008
662
+ },
663
+ {
664
+ "epoch": 16.0,
665
+ "eval_bigbrother_loss": 2.0177502632141113,
666
+ "eval_bigbrother_runtime": 41.1529,
667
+ "eval_bigbrother_samples_per_second": 33.291,
668
+ "eval_bigbrother_steps_per_second": 1.045,
669
+ "eval_bigbrother_wer": 0.5074518649802626,
670
+ "step": 21008
671
+ },
672
+ {
673
+ "epoch": 16.0,
674
+ "eval_NB_RUND_loss": 0.32014137506484985,
675
+ "eval_NB_RUND_runtime": 85.0566,
676
+ "eval_NB_RUND_samples_per_second": 22.079,
677
+ "eval_NB_RUND_steps_per_second": 0.694,
678
+ "eval_NB_RUND_wer": 0.15522928399034594,
679
+ "step": 21008
680
+ },
681
+ {
682
+ "epoch": 16.0,
683
+ "eval_rundkast_loss": 0.28833797574043274,
684
+ "eval_rundkast_runtime": 33.0799,
685
+ "eval_rundkast_samples_per_second": 40.659,
686
+ "eval_rundkast_steps_per_second": 1.3,
687
+ "eval_rundkast_wer": 0.13219853643016227,
688
+ "step": 21008
689
+ },
690
+ {
691
+ "epoch": 16.0,
692
+ "eval_nb_samtale_loss": 0.40036484599113464,
693
+ "eval_nb_samtale_runtime": 37.391,
694
+ "eval_nb_samtale_samples_per_second": 14.255,
695
+ "eval_nb_samtale_steps_per_second": 0.455,
696
+ "eval_nb_samtale_wer": 0.17838541666666666,
697
+ "step": 21008
698
+ },
699
+ {
700
+ "epoch": 17.0,
701
+ "grad_norm": 2.875094413757324,
702
+ "learning_rate": 5.9206193649549204e-05,
703
+ "loss": 0.2424,
704
+ "step": 22321
705
+ },
706
+ {
707
+ "epoch": 17.0,
708
+ "eval_bigbrother_loss": 2.1849660873413086,
709
+ "eval_bigbrother_runtime": 41.489,
710
+ "eval_bigbrother_samples_per_second": 33.021,
711
+ "eval_bigbrother_steps_per_second": 1.036,
712
+ "eval_bigbrother_wer": 0.51494401031177,
713
+ "step": 22321
714
+ },
715
+ {
716
+ "epoch": 17.0,
717
+ "eval_NB_RUND_loss": 0.32200556993484497,
718
+ "eval_NB_RUND_runtime": 85.425,
719
+ "eval_NB_RUND_samples_per_second": 21.984,
720
+ "eval_NB_RUND_steps_per_second": 0.691,
721
+ "eval_NB_RUND_wer": 0.15289621882542237,
722
+ "step": 22321
723
+ },
724
+ {
725
+ "epoch": 17.0,
726
+ "eval_rundkast_loss": 0.28678199648857117,
727
+ "eval_rundkast_runtime": 33.351,
728
+ "eval_rundkast_samples_per_second": 40.329,
729
+ "eval_rundkast_steps_per_second": 1.289,
730
+ "eval_rundkast_wer": 0.13180082723512568,
731
+ "step": 22321
732
+ },
733
+ {
734
+ "epoch": 17.0,
735
+ "eval_nb_samtale_loss": 0.4106721878051758,
736
+ "eval_nb_samtale_runtime": 37.5903,
737
+ "eval_nb_samtale_samples_per_second": 14.179,
738
+ "eval_nb_samtale_steps_per_second": 0.452,
739
+ "eval_nb_samtale_wer": 0.17415364583333334,
740
+ "step": 22321
741
+ },
742
+ {
743
+ "epoch": 18.0,
744
+ "grad_norm": 4.949888229370117,
745
+ "learning_rate": 5.66326930615445e-05,
746
+ "loss": 0.2374,
747
+ "step": 23634
748
+ },
749
+ {
750
+ "epoch": 18.0,
751
+ "eval_bigbrother_loss": 2.1056079864501953,
752
+ "eval_bigbrother_runtime": 41.6333,
753
+ "eval_bigbrother_samples_per_second": 32.906,
754
+ "eval_bigbrother_steps_per_second": 1.033,
755
+ "eval_bigbrother_wer": 0.5105937323773463,
756
+ "step": 23634
757
+ },
758
+ {
759
+ "epoch": 18.0,
760
+ "eval_NB_RUND_loss": 0.32035475969314575,
761
+ "eval_NB_RUND_runtime": 87.7221,
762
+ "eval_NB_RUND_samples_per_second": 21.409,
763
+ "eval_NB_RUND_steps_per_second": 0.673,
764
+ "eval_NB_RUND_wer": 0.15329847144006437,
765
+ "step": 23634
766
+ },
767
+ {
768
+ "epoch": 18.0,
769
+ "eval_rundkast_loss": 0.29013389348983765,
770
+ "eval_rundkast_runtime": 33.2901,
771
+ "eval_rundkast_samples_per_second": 40.402,
772
+ "eval_rundkast_steps_per_second": 1.292,
773
+ "eval_rundkast_wer": 0.13005090677696468,
774
+ "step": 23634
775
+ },
776
+ {
777
+ "epoch": 18.0,
778
+ "eval_nb_samtale_loss": 0.39669641852378845,
779
+ "eval_nb_samtale_runtime": 38.5507,
780
+ "eval_nb_samtale_samples_per_second": 13.826,
781
+ "eval_nb_samtale_steps_per_second": 0.441,
782
+ "eval_nb_samtale_wer": 0.17724609375,
783
+ "step": 23634
784
+ },
785
+ {
786
+ "epoch": 19.0,
787
+ "grad_norm": 5.62293004989624,
788
+ "learning_rate": 5.4063112504900036e-05,
789
+ "loss": 0.2313,
790
+ "step": 24947
791
+ },
792
+ {
793
+ "epoch": 19.0,
794
+ "eval_bigbrother_loss": 2.0347869396209717,
795
+ "eval_bigbrother_runtime": 41.8028,
796
+ "eval_bigbrother_samples_per_second": 32.773,
797
+ "eval_bigbrother_steps_per_second": 1.029,
798
+ "eval_bigbrother_wer": 0.503584951260775,
799
+ "step": 24947
800
+ },
801
+ {
802
+ "epoch": 19.0,
803
+ "eval_NB_RUND_loss": 0.3127482533454895,
804
+ "eval_NB_RUND_runtime": 85.6096,
805
+ "eval_NB_RUND_samples_per_second": 21.937,
806
+ "eval_NB_RUND_steps_per_second": 0.689,
807
+ "eval_NB_RUND_wer": 0.15100563153660498,
808
+ "step": 24947
809
+ },
810
+ {
811
+ "epoch": 19.0,
812
+ "eval_rundkast_loss": 0.2715360224246979,
813
+ "eval_rundkast_runtime": 33.3269,
814
+ "eval_rundkast_samples_per_second": 40.358,
815
+ "eval_rundkast_steps_per_second": 1.29,
816
+ "eval_rundkast_wer": 0.1292554883868915,
817
+ "step": 24947
818
+ },
819
+ {
820
+ "epoch": 19.0,
821
+ "eval_nb_samtale_loss": 0.4169124960899353,
822
+ "eval_nb_samtale_runtime": 37.555,
823
+ "eval_nb_samtale_samples_per_second": 14.193,
824
+ "eval_nb_samtale_steps_per_second": 0.453,
825
+ "eval_nb_samtale_wer": 0.17350260416666666,
826
+ "step": 24947
827
+ },
828
+ {
829
+ "epoch": 20.0,
830
+ "grad_norm": 2.86464262008667,
831
+ "learning_rate": 5.148961191689534e-05,
832
+ "loss": 0.2227,
833
+ "step": 26260
834
+ },
835
+ {
836
+ "epoch": 20.0,
837
+ "eval_bigbrother_loss": 2.1283321380615234,
838
+ "eval_bigbrother_runtime": 41.6851,
839
+ "eval_bigbrother_samples_per_second": 32.865,
840
+ "eval_bigbrother_steps_per_second": 1.032,
841
+ "eval_bigbrother_wer": 0.5016514944010312,
842
+ "step": 26260
843
+ },
844
+ {
845
+ "epoch": 20.0,
846
+ "eval_NB_RUND_loss": 0.3282929062843323,
847
+ "eval_NB_RUND_runtime": 86.0672,
848
+ "eval_NB_RUND_samples_per_second": 21.82,
849
+ "eval_NB_RUND_steps_per_second": 0.686,
850
+ "eval_NB_RUND_wer": 0.15160901045856798,
851
+ "step": 26260
852
+ },
853
+ {
854
+ "epoch": 20.0,
855
+ "eval_rundkast_loss": 0.2934817969799042,
856
+ "eval_rundkast_runtime": 33.1568,
857
+ "eval_rundkast_samples_per_second": 40.565,
858
+ "eval_rundkast_steps_per_second": 1.297,
859
+ "eval_rundkast_wer": 0.1306076996500159,
860
+ "step": 26260
861
+ },
862
+ {
863
+ "epoch": 20.0,
864
+ "eval_nb_samtale_loss": 0.41641756892204285,
865
+ "eval_nb_samtale_runtime": 37.7201,
866
+ "eval_nb_samtale_samples_per_second": 14.13,
867
+ "eval_nb_samtale_steps_per_second": 0.451,
868
+ "eval_nb_samtale_wer": 0.17301432291666666,
869
+ "step": 26260
870
+ },
871
+ {
872
+ "epoch": 21.0,
873
+ "grad_norm": 3.0193545818328857,
874
+ "learning_rate": 4.8916111328890636e-05,
875
+ "loss": 0.2214,
876
+ "step": 27573
877
+ },
878
+ {
879
+ "epoch": 21.0,
880
+ "eval_bigbrother_loss": 2.2207038402557373,
881
+ "eval_bigbrother_runtime": 41.5705,
882
+ "eval_bigbrother_samples_per_second": 32.956,
883
+ "eval_bigbrother_steps_per_second": 1.034,
884
+ "eval_bigbrother_wer": 0.50656569725288,
885
+ "step": 27573
886
+ },
887
+ {
888
+ "epoch": 21.0,
889
+ "eval_NB_RUND_loss": 0.32080039381980896,
890
+ "eval_NB_RUND_runtime": 85.8822,
891
+ "eval_NB_RUND_samples_per_second": 21.867,
892
+ "eval_NB_RUND_steps_per_second": 0.687,
893
+ "eval_NB_RUND_wer": 0.1498793242156074,
894
+ "step": 27573
895
+ },
896
+ {
897
+ "epoch": 21.0,
898
+ "eval_rundkast_loss": 0.28392452001571655,
899
+ "eval_rundkast_runtime": 33.3668,
900
+ "eval_rundkast_samples_per_second": 40.31,
901
+ "eval_rundkast_steps_per_second": 1.289,
902
+ "eval_rundkast_wer": 0.1287782373528476,
903
+ "step": 27573
904
+ },
905
+ {
906
+ "epoch": 21.0,
907
+ "eval_nb_samtale_loss": 0.4135919511318207,
908
+ "eval_nb_samtale_runtime": 37.8061,
909
+ "eval_nb_samtale_samples_per_second": 14.098,
910
+ "eval_nb_samtale_steps_per_second": 0.45,
911
+ "eval_nb_samtale_wer": 0.17097981770833334,
912
+ "step": 27573
913
+ },
914
+ {
915
+ "epoch": 22.0,
916
+ "grad_norm": 3.5664854049682617,
917
+ "learning_rate": 4.6344570756566055e-05,
918
+ "loss": 0.2162,
919
+ "step": 28886
920
+ },
921
+ {
922
+ "epoch": 22.0,
923
+ "eval_bigbrother_loss": 2.1843111515045166,
924
+ "eval_bigbrother_runtime": 40.759,
925
+ "eval_bigbrother_samples_per_second": 33.612,
926
+ "eval_bigbrother_steps_per_second": 1.055,
927
+ "eval_bigbrother_wer": 0.5060823330379441,
928
+ "step": 28886
929
+ },
930
+ {
931
+ "epoch": 22.0,
932
+ "eval_NB_RUND_loss": 0.33185243606567383,
933
+ "eval_NB_RUND_runtime": 84.3565,
934
+ "eval_NB_RUND_samples_per_second": 22.263,
935
+ "eval_NB_RUND_steps_per_second": 0.699,
936
+ "eval_NB_RUND_wer": 0.1508447304907482,
937
+ "step": 28886
938
+ },
939
+ {
940
+ "epoch": 22.0,
941
+ "eval_rundkast_loss": 0.2995389699935913,
942
+ "eval_rundkast_runtime": 32.6931,
943
+ "eval_rundkast_samples_per_second": 41.14,
944
+ "eval_rundkast_steps_per_second": 1.315,
945
+ "eval_rundkast_wer": 0.12869869551384028,
946
+ "step": 28886
947
+ },
948
+ {
949
+ "epoch": 22.0,
950
+ "eval_nb_samtale_loss": 0.4128870964050293,
951
+ "eval_nb_samtale_runtime": 36.8088,
952
+ "eval_nb_samtale_samples_per_second": 14.48,
953
+ "eval_nb_samtale_steps_per_second": 0.462,
954
+ "eval_nb_samtale_wer": 0.173828125,
955
+ "step": 28886
956
+ },
957
+ {
958
+ "epoch": 23.0,
959
+ "grad_norm": 4.249322414398193,
960
+ "learning_rate": 4.37749901999216e-05,
961
+ "loss": 0.2125,
962
+ "step": 30199
963
+ },
964
+ {
965
+ "epoch": 23.0,
966
+ "eval_bigbrother_loss": 2.460817575454712,
967
+ "eval_bigbrother_runtime": 40.6315,
968
+ "eval_bigbrother_samples_per_second": 33.718,
969
+ "eval_bigbrother_steps_per_second": 1.058,
970
+ "eval_bigbrother_wer": 0.5047128010956256,
971
+ "step": 30199
972
+ },
973
+ {
974
+ "epoch": 23.0,
975
+ "eval_NB_RUND_loss": 0.34763312339782715,
976
+ "eval_NB_RUND_runtime": 84.2518,
977
+ "eval_NB_RUND_samples_per_second": 22.29,
978
+ "eval_NB_RUND_steps_per_second": 0.7,
979
+ "eval_NB_RUND_wer": 0.14774738535800483,
980
+ "step": 30199
981
+ },
982
+ {
983
+ "epoch": 23.0,
984
+ "eval_rundkast_loss": 0.3159354031085968,
985
+ "eval_rundkast_runtime": 32.702,
986
+ "eval_rundkast_samples_per_second": 41.129,
987
+ "eval_rundkast_steps_per_second": 1.315,
988
+ "eval_rundkast_wer": 0.12758510976773782,
989
+ "step": 30199
990
+ },
991
+ {
992
+ "epoch": 23.0,
993
+ "eval_nb_samtale_loss": 0.42722073197364807,
994
+ "eval_nb_samtale_runtime": 36.8013,
995
+ "eval_nb_samtale_samples_per_second": 14.483,
996
+ "eval_nb_samtale_steps_per_second": 0.462,
997
+ "eval_nb_samtale_wer": 0.16796875,
998
+ "step": 30199
999
+ },
1000
+ {
1001
+ "epoch": 24.0,
1002
+ "grad_norm": 3.8950681686401367,
1003
+ "learning_rate": 4.12014896119169e-05,
1004
+ "loss": 0.2054,
1005
+ "step": 31512
1006
+ },
1007
+ {
1008
+ "epoch": 24.0,
1009
+ "eval_bigbrother_loss": 2.2196638584136963,
1010
+ "eval_bigbrother_runtime": 41.3959,
1011
+ "eval_bigbrother_samples_per_second": 33.095,
1012
+ "eval_bigbrother_steps_per_second": 1.039,
1013
+ "eval_bigbrother_wer": 0.4956900024168211,
1014
+ "step": 31512
1015
+ },
1016
+ {
1017
+ "epoch": 24.0,
1018
+ "eval_NB_RUND_loss": 0.31967854499816895,
1019
+ "eval_NB_RUND_runtime": 85.4915,
1020
+ "eval_NB_RUND_samples_per_second": 21.967,
1021
+ "eval_NB_RUND_steps_per_second": 0.69,
1022
+ "eval_NB_RUND_wer": 0.14903459372485922,
1023
+ "step": 31512
1024
+ },
1025
+ {
1026
+ "epoch": 24.0,
1027
+ "eval_rundkast_loss": 0.28724637627601624,
1028
+ "eval_rundkast_runtime": 32.6772,
1029
+ "eval_rundkast_samples_per_second": 41.16,
1030
+ "eval_rundkast_steps_per_second": 1.316,
1031
+ "eval_rundkast_wer": 0.12726694241170855,
1032
+ "step": 31512
1033
+ },
1034
+ {
1035
+ "epoch": 24.0,
1036
+ "eval_nb_samtale_loss": 0.401165634393692,
1037
+ "eval_nb_samtale_runtime": 37.0545,
1038
+ "eval_nb_samtale_samples_per_second": 14.384,
1039
+ "eval_nb_samtale_steps_per_second": 0.459,
1040
+ "eval_nb_samtale_wer": 0.17106119791666666,
1041
+ "step": 31512
1042
+ },
1043
+ {
1044
+ "epoch": 25.0,
1045
+ "grad_norm": 3.483825922012329,
1046
+ "learning_rate": 3.8627989023912194e-05,
1047
+ "loss": 0.2016,
1048
+ "step": 32825
1049
+ },
1050
+ {
1051
+ "epoch": 25.0,
1052
+ "eval_bigbrother_loss": 2.3513925075531006,
1053
+ "eval_bigbrother_runtime": 40.9522,
1054
+ "eval_bigbrother_samples_per_second": 33.454,
1055
+ "eval_bigbrother_steps_per_second": 1.05,
1056
+ "eval_bigbrother_wer": 0.5014098122935632,
1057
+ "step": 32825
1058
+ },
1059
+ {
1060
+ "epoch": 25.0,
1061
+ "eval_NB_RUND_loss": 0.3408574163913727,
1062
+ "eval_NB_RUND_runtime": 84.79,
1063
+ "eval_NB_RUND_samples_per_second": 22.149,
1064
+ "eval_NB_RUND_steps_per_second": 0.696,
1065
+ "eval_NB_RUND_wer": 0.14714400643604184,
1066
+ "step": 32825
1067
+ },
1068
+ {
1069
+ "epoch": 25.0,
1070
+ "eval_rundkast_loss": 0.3113822042942047,
1071
+ "eval_rundkast_runtime": 33.2613,
1072
+ "eval_rundkast_samples_per_second": 40.437,
1073
+ "eval_rundkast_steps_per_second": 1.293,
1074
+ "eval_rundkast_wer": 0.12599427298759147,
1075
+ "step": 32825
1076
+ },
1077
+ {
1078
+ "epoch": 25.0,
1079
+ "eval_nb_samtale_loss": 0.41482433676719666,
1080
+ "eval_nb_samtale_runtime": 36.9383,
1081
+ "eval_nb_samtale_samples_per_second": 14.429,
1082
+ "eval_nb_samtale_steps_per_second": 0.46,
1083
+ "eval_nb_samtale_wer": 0.16861979166666666,
1084
+ "step": 32825
1085
+ },
1086
+ {
1087
+ "epoch": 26.0,
1088
+ "grad_norm": 5.882791042327881,
1089
+ "learning_rate": 3.6054488435907494e-05,
1090
+ "loss": 0.1986,
1091
+ "step": 34138
1092
+ },
1093
+ {
1094
+ "epoch": 26.0,
1095
+ "eval_bigbrother_loss": 2.2447853088378906,
1096
+ "eval_bigbrother_runtime": 41.1993,
1097
+ "eval_bigbrother_samples_per_second": 33.253,
1098
+ "eval_bigbrother_steps_per_second": 1.044,
1099
+ "eval_bigbrother_wer": 0.4957705631193104,
1100
+ "step": 34138
1101
+ },
1102
+ {
1103
+ "epoch": 26.0,
1104
+ "eval_NB_RUND_loss": 0.3391417860984802,
1105
+ "eval_NB_RUND_runtime": 86.9591,
1106
+ "eval_NB_RUND_samples_per_second": 21.596,
1107
+ "eval_NB_RUND_steps_per_second": 0.678,
1108
+ "eval_NB_RUND_wer": 0.14734513274336283,
1109
+ "step": 34138
1110
+ },
1111
+ {
1112
+ "epoch": 26.0,
1113
+ "eval_rundkast_loss": 0.3099968433380127,
1114
+ "eval_rundkast_runtime": 32.6851,
1115
+ "eval_rundkast_samples_per_second": 41.15,
1116
+ "eval_rundkast_steps_per_second": 1.316,
1117
+ "eval_rundkast_wer": 0.12575564747056953,
1118
+ "step": 34138
1119
+ },
1120
+ {
1121
+ "epoch": 26.0,
1122
+ "eval_nb_samtale_loss": 0.41232603788375854,
1123
+ "eval_nb_samtale_runtime": 37.0492,
1124
+ "eval_nb_samtale_samples_per_second": 14.386,
1125
+ "eval_nb_samtale_steps_per_second": 0.459,
1126
+ "eval_nb_samtale_wer": 0.16951497395833334,
1127
+ "step": 34138
1128
+ },
1129
+ {
1130
+ "epoch": 27.0,
1131
+ "grad_norm": 2.837421178817749,
1132
+ "learning_rate": 3.348098784790279e-05,
1133
+ "loss": 0.1953,
1134
+ "step": 35451
1135
+ },
1136
+ {
1137
+ "epoch": 27.0,
1138
+ "eval_bigbrother_loss": 2.240429401397705,
1139
+ "eval_bigbrother_runtime": 41.126,
1140
+ "eval_bigbrother_samples_per_second": 33.312,
1141
+ "eval_bigbrother_steps_per_second": 1.046,
1142
+ "eval_bigbrother_wer": 0.49552888101184245,
1143
+ "step": 35451
1144
+ },
1145
+ {
1146
+ "epoch": 27.0,
1147
+ "eval_NB_RUND_loss": 0.3420700132846832,
1148
+ "eval_NB_RUND_runtime": 84.1024,
1149
+ "eval_NB_RUND_samples_per_second": 22.33,
1150
+ "eval_NB_RUND_steps_per_second": 0.702,
1151
+ "eval_NB_RUND_wer": 0.14738535800482702,
1152
+ "step": 35451
1153
+ },
1154
+ {
1155
+ "epoch": 27.0,
1156
+ "eval_rundkast_loss": 0.3145124018192291,
1157
+ "eval_rundkast_runtime": 32.9045,
1158
+ "eval_rundkast_samples_per_second": 40.876,
1159
+ "eval_rundkast_steps_per_second": 1.307,
1160
+ "eval_rundkast_wer": 0.12591473114858415,
1161
+ "step": 35451
1162
+ },
1163
+ {
1164
+ "epoch": 27.0,
1165
+ "eval_nb_samtale_loss": 0.4117932617664337,
1166
+ "eval_nb_samtale_runtime": 36.8227,
1167
+ "eval_nb_samtale_samples_per_second": 14.475,
1168
+ "eval_nb_samtale_steps_per_second": 0.462,
1169
+ "eval_nb_samtale_wer": 0.16943359375,
1170
+ "step": 35451
1171
+ },
1172
+ {
1173
+ "epoch": 28.0,
1174
+ "grad_norm": 3.3868250846862793,
1175
+ "learning_rate": 3.0909447275578206e-05,
1176
+ "loss": 0.1939,
1177
+ "step": 36764
1178
+ },
1179
+ {
1180
+ "epoch": 28.0,
1181
+ "eval_bigbrother_loss": 2.409648895263672,
1182
+ "eval_bigbrother_runtime": 41.1082,
1183
+ "eval_bigbrother_samples_per_second": 33.327,
1184
+ "eval_bigbrother_steps_per_second": 1.046,
1185
+ "eval_bigbrother_wer": 0.49536775960686374,
1186
+ "step": 36764
1187
+ },
1188
+ {
1189
+ "epoch": 28.0,
1190
+ "eval_NB_RUND_loss": 0.34609299898147583,
1191
+ "eval_NB_RUND_runtime": 86.3212,
1192
+ "eval_NB_RUND_samples_per_second": 21.756,
1193
+ "eval_NB_RUND_steps_per_second": 0.683,
1194
+ "eval_NB_RUND_wer": 0.14569589702333066,
1195
+ "step": 36764
1196
+ },
1197
+ {
1198
+ "epoch": 28.0,
1199
+ "eval_rundkast_loss": 0.31334388256073,
1200
+ "eval_rundkast_runtime": 32.8372,
1201
+ "eval_rundkast_samples_per_second": 40.96,
1202
+ "eval_rundkast_steps_per_second": 1.309,
1203
+ "eval_rundkast_wer": 0.12368755965637926,
1204
+ "step": 36764
1205
+ },
1206
+ {
1207
+ "epoch": 28.0,
1208
+ "eval_nb_samtale_loss": 0.42863455414772034,
1209
+ "eval_nb_samtale_runtime": 37.033,
1210
+ "eval_nb_samtale_samples_per_second": 14.393,
1211
+ "eval_nb_samtale_steps_per_second": 0.459,
1212
+ "eval_nb_samtale_wer": 0.16829427083333334,
1213
+ "step": 36764
1214
+ },
1215
+ {
1216
+ "epoch": 29.0,
1217
+ "grad_norm": 3.9038360118865967,
1218
+ "learning_rate": 2.8337906703253626e-05,
1219
+ "loss": 0.1883,
1220
+ "step": 38077
1221
+ },
1222
+ {
1223
+ "epoch": 29.0,
1224
+ "eval_bigbrother_loss": 2.32140851020813,
1225
+ "eval_bigbrother_runtime": 40.9774,
1226
+ "eval_bigbrother_samples_per_second": 33.433,
1227
+ "eval_bigbrother_steps_per_second": 1.049,
1228
+ "eval_bigbrother_wer": 0.4960122452267784,
1229
+ "step": 38077
1230
+ },
1231
+ {
1232
+ "epoch": 29.0,
1233
+ "eval_NB_RUND_loss": 0.35102906823158264,
1234
+ "eval_NB_RUND_runtime": 84.5748,
1235
+ "eval_NB_RUND_samples_per_second": 22.205,
1236
+ "eval_NB_RUND_steps_per_second": 0.698,
1237
+ "eval_NB_RUND_wer": 0.14646017699115044,
1238
+ "step": 38077
1239
+ },
1240
+ {
1241
+ "epoch": 29.0,
1242
+ "eval_rundkast_loss": 0.31922492384910583,
1243
+ "eval_rundkast_runtime": 32.7767,
1244
+ "eval_rundkast_samples_per_second": 41.035,
1245
+ "eval_rundkast_steps_per_second": 1.312,
1246
+ "eval_rundkast_wer": 0.12496022908049634,
1247
+ "step": 38077
1248
+ },
1249
+ {
1250
+ "epoch": 29.0,
1251
+ "eval_nb_samtale_loss": 0.4308791756629944,
1252
+ "eval_nb_samtale_runtime": 36.5599,
1253
+ "eval_nb_samtale_samples_per_second": 14.579,
1254
+ "eval_nb_samtale_steps_per_second": 0.465,
1255
+ "eval_nb_samtale_wer": 0.167724609375,
1256
+ "step": 38077
1257
+ },
1258
+ {
1259
+ "epoch": 30.0,
1260
+ "grad_norm": 2.5224320888519287,
1261
+ "learning_rate": 2.5764406115248922e-05,
1262
+ "loss": 0.1857,
1263
+ "step": 39390
1264
+ },
1265
+ {
1266
+ "epoch": 30.0,
1267
+ "eval_bigbrother_loss": 2.3643717765808105,
1268
+ "eval_bigbrother_runtime": 41.8962,
1269
+ "eval_bigbrother_samples_per_second": 32.7,
1270
+ "eval_bigbrother_steps_per_second": 1.026,
1271
+ "eval_bigbrother_wer": 0.496415048739225,
1272
+ "step": 39390
1273
+ },
1274
+ {
1275
+ "epoch": 30.0,
1276
+ "eval_NB_RUND_loss": 0.3557446002960205,
1277
+ "eval_NB_RUND_runtime": 143.0815,
1278
+ "eval_NB_RUND_samples_per_second": 13.125,
1279
+ "eval_NB_RUND_steps_per_second": 0.412,
1280
+ "eval_NB_RUND_wer": 0.14617860016090103,
1281
+ "step": 39390
1282
+ },
1283
+ {
1284
+ "epoch": 30.0,
1285
+ "eval_rundkast_loss": 0.3290172815322876,
1286
+ "eval_rundkast_runtime": 32.9137,
1287
+ "eval_rundkast_samples_per_second": 40.864,
1288
+ "eval_rundkast_steps_per_second": 1.306,
1289
+ "eval_rundkast_wer": 0.12623289850461342,
1290
+ "step": 39390
1291
+ },
1292
+ {
1293
+ "epoch": 30.0,
1294
+ "eval_nb_samtale_loss": 0.42328643798828125,
1295
+ "eval_nb_samtale_runtime": 36.9569,
1296
+ "eval_nb_samtale_samples_per_second": 14.422,
1297
+ "eval_nb_samtale_steps_per_second": 0.46,
1298
+ "eval_nb_samtale_wer": 0.16715494791666666,
1299
+ "step": 39390
1300
+ },
1301
+ {
1302
+ "epoch": 31.0,
1303
+ "grad_norm": 8.252937316894531,
1304
+ "learning_rate": 2.319090552724422e-05,
1305
+ "loss": 0.184,
1306
+ "step": 40703
1307
+ },
1308
+ {
1309
+ "epoch": 31.0,
1310
+ "eval_bigbrother_loss": 2.3923449516296387,
1311
+ "eval_bigbrother_runtime": 41.1367,
1312
+ "eval_bigbrother_samples_per_second": 33.304,
1313
+ "eval_bigbrother_steps_per_second": 1.045,
1314
+ "eval_bigbrother_wer": 0.4919036493998228,
1315
+ "step": 40703
1316
+ },
1317
+ {
1318
+ "epoch": 31.0,
1319
+ "eval_NB_RUND_loss": 0.35123324394226074,
1320
+ "eval_NB_RUND_runtime": 85.5059,
1321
+ "eval_NB_RUND_samples_per_second": 21.963,
1322
+ "eval_NB_RUND_steps_per_second": 0.69,
1323
+ "eval_NB_RUND_wer": 0.14473049074818986,
1324
+ "step": 40703
1325
+ },
1326
+ {
1327
+ "epoch": 31.0,
1328
+ "eval_rundkast_loss": 0.31798994541168213,
1329
+ "eval_rundkast_runtime": 33.3879,
1330
+ "eval_rundkast_samples_per_second": 40.284,
1331
+ "eval_rundkast_steps_per_second": 1.288,
1332
+ "eval_rundkast_wer": 0.1243238943684378,
1333
+ "step": 40703
1334
+ },
1335
+ {
1336
+ "epoch": 31.0,
1337
+ "eval_nb_samtale_loss": 0.4355938732624054,
1338
+ "eval_nb_samtale_runtime": 37.0847,
1339
+ "eval_nb_samtale_samples_per_second": 14.372,
1340
+ "eval_nb_samtale_steps_per_second": 0.458,
1341
+ "eval_nb_samtale_wer": 0.16536458333333334,
1342
+ "step": 40703
1343
+ },
1344
+ {
1345
+ "epoch": 32.0,
1346
+ "grad_norm": 4.856067657470703,
1347
+ "learning_rate": 2.061936495491964e-05,
1348
+ "loss": 0.1795,
1349
+ "step": 42016
1350
+ },
1351
+ {
1352
+ "epoch": 32.0,
1353
+ "eval_bigbrother_loss": 2.3352065086364746,
1354
+ "eval_bigbrother_runtime": 41.1475,
1355
+ "eval_bigbrother_samples_per_second": 33.295,
1356
+ "eval_bigbrother_steps_per_second": 1.045,
1357
+ "eval_bigbrother_wer": 0.4930314992346733,
1358
+ "step": 42016
1359
+ },
1360
+ {
1361
+ "epoch": 32.0,
1362
+ "eval_NB_RUND_loss": 0.3572410047054291,
1363
+ "eval_NB_RUND_runtime": 105.2885,
1364
+ "eval_NB_RUND_samples_per_second": 17.837,
1365
+ "eval_NB_RUND_steps_per_second": 0.56,
1366
+ "eval_NB_RUND_wer": 0.14477071600965405,
1367
+ "step": 42016
1368
+ },
1369
+ {
1370
+ "epoch": 32.0,
1371
+ "eval_rundkast_loss": 0.32713791728019714,
1372
+ "eval_rundkast_runtime": 33.0242,
1373
+ "eval_rundkast_samples_per_second": 40.728,
1374
+ "eval_rundkast_steps_per_second": 1.302,
1375
+ "eval_rundkast_wer": 0.1243238943684378,
1376
+ "step": 42016
1377
+ },
1378
+ {
1379
+ "epoch": 32.0,
1380
+ "eval_nb_samtale_loss": 0.43330347537994385,
1381
+ "eval_nb_samtale_runtime": 36.808,
1382
+ "eval_nb_samtale_samples_per_second": 14.481,
1383
+ "eval_nb_samtale_steps_per_second": 0.462,
1384
+ "eval_nb_samtale_wer": 0.16560872395833334,
1385
+ "step": 42016
1386
+ },
1387
+ {
1388
+ "epoch": 33.0,
1389
+ "grad_norm": 5.330729961395264,
1390
+ "learning_rate": 1.8045864366914934e-05,
1391
+ "loss": 0.1797,
1392
+ "step": 43329
1393
+ },
1394
+ {
1395
+ "epoch": 33.0,
1396
+ "eval_bigbrother_loss": 2.4726736545562744,
1397
+ "eval_bigbrother_runtime": 41.9313,
1398
+ "eval_bigbrother_samples_per_second": 32.672,
1399
+ "eval_bigbrother_steps_per_second": 1.025,
1400
+ "eval_bigbrother_wer": 0.4939982276645452,
1401
+ "step": 43329
1402
+ },
1403
+ {
1404
+ "epoch": 33.0,
1405
+ "eval_NB_RUND_loss": 0.36341410875320435,
1406
+ "eval_NB_RUND_runtime": 84.523,
1407
+ "eval_NB_RUND_samples_per_second": 22.219,
1408
+ "eval_NB_RUND_steps_per_second": 0.698,
1409
+ "eval_NB_RUND_wer": 0.14710378117457765,
1410
+ "step": 43329
1411
+ },
1412
+ {
1413
+ "epoch": 33.0,
1414
+ "eval_rundkast_loss": 0.3344702422618866,
1415
+ "eval_rundkast_runtime": 32.8493,
1416
+ "eval_rundkast_samples_per_second": 40.945,
1417
+ "eval_rundkast_steps_per_second": 1.309,
1418
+ "eval_rundkast_wer": 0.12750556792873052,
1419
+ "step": 43329
1420
+ },
1421
+ {
1422
+ "epoch": 33.0,
1423
+ "eval_nb_samtale_loss": 0.43638789653778076,
1424
+ "eval_nb_samtale_runtime": 36.859,
1425
+ "eval_nb_samtale_samples_per_second": 14.46,
1426
+ "eval_nb_samtale_steps_per_second": 0.461,
1427
+ "eval_nb_samtale_wer": 0.16756184895833334,
1428
+ "step": 43329
1429
+ },
1430
+ {
1431
+ "epoch": 34.0,
1432
+ "grad_norm": 2.472703218460083,
1433
+ "learning_rate": 1.5474323794590357e-05,
1434
+ "loss": 0.1792,
1435
+ "step": 44642
1436
+ },
1437
+ {
1438
+ "epoch": 34.0,
1439
+ "eval_bigbrother_loss": 2.369781255722046,
1440
+ "eval_bigbrother_runtime": 41.5232,
1441
+ "eval_bigbrother_samples_per_second": 32.994,
1442
+ "eval_bigbrother_steps_per_second": 1.036,
1443
+ "eval_bigbrother_wer": 0.4910980423749295,
1444
+ "step": 44642
1445
+ },
1446
+ {
1447
+ "epoch": 34.0,
1448
+ "eval_NB_RUND_loss": 0.3522418141365051,
1449
+ "eval_NB_RUND_runtime": 86.176,
1450
+ "eval_NB_RUND_samples_per_second": 21.793,
1451
+ "eval_NB_RUND_steps_per_second": 0.685,
1452
+ "eval_NB_RUND_wer": 0.14509251810136767,
1453
+ "step": 44642
1454
+ },
1455
+ {
1456
+ "epoch": 34.0,
1457
+ "eval_rundkast_loss": 0.3241034746170044,
1458
+ "eval_rundkast_runtime": 32.7879,
1459
+ "eval_rundkast_samples_per_second": 41.021,
1460
+ "eval_rundkast_steps_per_second": 1.311,
1461
+ "eval_rundkast_wer": 0.12360801781737193,
1462
+ "step": 44642
1463
+ },
1464
+ {
1465
+ "epoch": 34.0,
1466
+ "eval_nb_samtale_loss": 0.42330336570739746,
1467
+ "eval_nb_samtale_runtime": 37.3979,
1468
+ "eval_nb_samtale_samples_per_second": 14.252,
1469
+ "eval_nb_samtale_steps_per_second": 0.455,
1470
+ "eval_nb_samtale_wer": 0.16707356770833334,
1471
+ "step": 44642
1472
+ },
1473
+ {
1474
+ "epoch": 35.0,
1475
+ "grad_norm": 5.900428295135498,
1476
+ "learning_rate": 1.2900823206585652e-05,
1477
+ "loss": 0.175,
1478
+ "step": 45955
1479
+ },
1480
+ {
1481
+ "epoch": 35.0,
1482
+ "eval_bigbrother_loss": 2.3766987323760986,
1483
+ "eval_bigbrother_runtime": 42.6165,
1484
+ "eval_bigbrother_samples_per_second": 32.147,
1485
+ "eval_bigbrother_steps_per_second": 1.009,
1486
+ "eval_bigbrother_wer": 0.48900346411020706,
1487
+ "step": 45955
1488
+ },
1489
+ {
1490
+ "epoch": 35.0,
1491
+ "eval_NB_RUND_loss": 0.359698623418808,
1492
+ "eval_NB_RUND_runtime": 84.4668,
1493
+ "eval_NB_RUND_samples_per_second": 22.234,
1494
+ "eval_NB_RUND_steps_per_second": 0.698,
1495
+ "eval_NB_RUND_wer": 0.14400643604183427,
1496
+ "step": 45955
1497
+ },
1498
+ {
1499
+ "epoch": 35.0,
1500
+ "eval_rundkast_loss": 0.32820314168930054,
1501
+ "eval_rundkast_runtime": 33.145,
1502
+ "eval_rundkast_samples_per_second": 40.579,
1503
+ "eval_rundkast_steps_per_second": 1.297,
1504
+ "eval_rundkast_wer": 0.12313076678332803,
1505
+ "step": 45955
1506
+ },
1507
+ {
1508
+ "epoch": 35.0,
1509
+ "eval_nb_samtale_loss": 0.438490629196167,
1510
+ "eval_nb_samtale_runtime": 36.783,
1511
+ "eval_nb_samtale_samples_per_second": 14.49,
1512
+ "eval_nb_samtale_steps_per_second": 0.462,
1513
+ "eval_nb_samtale_wer": 0.16552734375,
1514
+ "step": 45955
1515
+ },
1516
+ {
1517
+ "epoch": 36.0,
1518
+ "grad_norm": 9.065438270568848,
1519
+ "learning_rate": 1.0329282634261075e-05,
1520
+ "loss": 0.1735,
1521
+ "step": 47268
1522
+ },
1523
+ {
1524
+ "epoch": 36.0,
1525
+ "eval_bigbrother_loss": 2.3984920978546143,
1526
+ "eval_bigbrother_runtime": 41.0024,
1527
+ "eval_bigbrother_samples_per_second": 33.413,
1528
+ "eval_bigbrother_steps_per_second": 1.049,
1529
+ "eval_bigbrother_wer": 0.4888423427052284,
1530
+ "step": 47268
1531
+ },
1532
+ {
1533
+ "epoch": 36.0,
1534
+ "eval_NB_RUND_loss": 0.3673810064792633,
1535
+ "eval_NB_RUND_runtime": 85.7426,
1536
+ "eval_NB_RUND_samples_per_second": 21.903,
1537
+ "eval_NB_RUND_steps_per_second": 0.688,
1538
+ "eval_NB_RUND_wer": 0.14384553499597746,
1539
+ "step": 47268
1540
+ },
1541
+ {
1542
+ "epoch": 36.0,
1543
+ "eval_rundkast_loss": 0.3365083932876587,
1544
+ "eval_rundkast_runtime": 32.9335,
1545
+ "eval_rundkast_samples_per_second": 40.84,
1546
+ "eval_rundkast_steps_per_second": 1.306,
1547
+ "eval_rundkast_wer": 0.12336939230034999,
1548
+ "step": 47268
1549
+ },
1550
+ {
1551
+ "epoch": 36.0,
1552
+ "eval_nb_samtale_loss": 0.44506001472473145,
1553
+ "eval_nb_samtale_runtime": 36.8094,
1554
+ "eval_nb_samtale_samples_per_second": 14.48,
1555
+ "eval_nb_samtale_steps_per_second": 0.462,
1556
+ "eval_nb_samtale_wer": 0.16463216145833334,
1557
+ "step": 47268
1558
+ },
1559
+ {
1560
+ "epoch": 37.0,
1561
+ "grad_norm": 18.192665100097656,
1562
+ "learning_rate": 7.757742061936496e-06,
1563
+ "loss": 0.1731,
1564
+ "step": 48581
1565
+ },
1566
+ {
1567
+ "epoch": 37.0,
1568
+ "eval_bigbrother_loss": 2.399851083755493,
1569
+ "eval_bigbrother_runtime": 41.7999,
1570
+ "eval_bigbrother_samples_per_second": 32.775,
1571
+ "eval_bigbrother_steps_per_second": 1.029,
1572
+ "eval_bigbrother_wer": 0.49117860307741884,
1573
+ "step": 48581
1574
+ },
1575
+ {
1576
+ "epoch": 37.0,
1577
+ "eval_NB_RUND_loss": 0.3626614809036255,
1578
+ "eval_NB_RUND_runtime": 84.556,
1579
+ "eval_NB_RUND_samples_per_second": 22.21,
1580
+ "eval_NB_RUND_steps_per_second": 0.698,
1581
+ "eval_NB_RUND_wer": 0.14368463395012068,
1582
+ "step": 48581
1583
+ },
1584
+ {
1585
+ "epoch": 37.0,
1586
+ "eval_rundkast_loss": 0.33268144726753235,
1587
+ "eval_rundkast_runtime": 33.4742,
1588
+ "eval_rundkast_samples_per_second": 40.18,
1589
+ "eval_rundkast_steps_per_second": 1.285,
1590
+ "eval_rundkast_wer": 0.12289214126630607,
1591
+ "step": 48581
1592
+ },
1593
+ {
1594
+ "epoch": 37.0,
1595
+ "eval_nb_samtale_loss": 0.4378024637699127,
1596
+ "eval_nb_samtale_runtime": 36.9248,
1597
+ "eval_nb_samtale_samples_per_second": 14.435,
1598
+ "eval_nb_samtale_steps_per_second": 0.46,
1599
+ "eval_nb_samtale_wer": 0.164794921875,
1600
+ "step": 48581
1601
+ }
1602
+ ],
1603
+ "logging_steps": 500,
1604
+ "max_steps": 52520,
1605
+ "num_input_tokens_seen": 0,
1606
+ "num_train_epochs": 40,
1607
+ "save_steps": 500,
1608
+ "total_flos": 2.707743528931453e+20,
1609
+ "train_batch_size": 48,
1610
+ "trial_name": null,
1611
+ "trial_params": null
1612
+ }
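
trainer_state.json records the full fine-tuning trajectory: 37 of 40 planned epochs (global step 48,581 of 52,520, batch size 48), with the training loss falling from 0.56 to 0.17 and the NB_RUND eval WER from 0.213 to 0.1437, which is the best_metric behind the saved checkpoint. A small sketch for pulling that curve out of the file; field names follow the JSON above, and the path is an assumption:

import json

with open("trainer_state.json", encoding="utf-8") as f:
    state = json.load(f)

print(state["best_metric"], state["global_step"])  # 0.1436..., 48581

# Collect the per-epoch eval WER on the NB_RUND split.
wer_curve = [(entry["epoch"], entry["eval_NB_RUND_wer"])
             for entry in state["log_history"] if "eval_NB_RUND_wer" in entry]
for epoch, wer in wer_curve:
    print(f"epoch {epoch:4.1f}  WER {wer:.4f}")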
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aff432154be65fa2934e4e3ecad3fa9e0f006e696150ceb8313931ee493060cd
+ size 4984
vocab.json ADDED
@@ -0,0 +1,34 @@
+ {
+   "[PAD]": 31,
+   "[UNK]": 30,
+   "a": 1,
+   "b": 2,
+   "c": 3,
+   "d": 4,
+   "e": 5,
+   "f": 6,
+   "g": 7,
+   "h": 8,
+   "i": 9,
+   "j": 10,
+   "k": 11,
+   "l": 12,
+   "m": 13,
+   "n": 14,
+   "o": 15,
+   "p": 16,
+   "q": 17,
+   "r": 18,
+   "s": 19,
+   "t": 20,
+   "u": 21,
+   "v": 22,
+   "w": 23,
+   "x": 24,
+   "y": 25,
+   "z": 26,
+   "|": 0,
+   "å": 27,
+   "æ": 28,
+   "ø": 29
+ }
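
vocab.json is the character-level CTC vocabulary: "|" (id 0) is the word delimiter, ids 1-29 cover a-z plus å/æ/ø, and [UNK]/[PAD] sit at 30/31; added_tokens.json appends <s>/</s> as 32/33, giving the vocab_size of 34 in config.json. A small sketch of plain greedy (argmax) CTC decoding against this vocabulary, without the language model; the helper and the example id sequence are illustrative only:

import json

with open("vocab.json", encoding="utf-8") as f:
    vocab = json.load(f)
id2char = {i: c for c, i in vocab.items()}

def greedy_ctc_decode(ids, blank_id=31):
    # [PAD] (id 31) serves as the CTC blank here: collapse repeats, drop blanks.
    out, prev = [], None
    for i in ids:
        if i != prev and i != blank_id:
            out.append(id2char.get(i, ""))
        prev = i
    return "".join(out).replace("|", " ").strip()

print(greedy_ctc_decode([8, 8, 31, 5, 9, 0, 31]))  # -> "hei"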