Tflatval committed
Commit 9ddc0c9
1 Parent(s): 63f822d

Upload folder using huggingface_hub
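The commit message points to huggingface_hub's folder upload. A minimal sketch of that call, assuming a hypothetical repo id and a locally configured access token (neither is shown on this page):

```python
# Sketch of the upload that produces a commit like this one; the repo id and
# folder path are assumptions, not values taken from the diff.
from huggingface_hub import upload_folder

upload_folder(
    folder_path="saved_model/W2V/single/NB/30.05",          # local training output (assumed)
    repo_id="Tflatval/nb-wav2vec2-300m-bokmaal-finetuned",  # hypothetical repo id
    commit_message="Upload folder using huggingface_hub",
)
```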
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+ language_model/unigrams.txt filter=lfs diff=lfs merge=lfs -text
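The added rule routes language_model/unigrams.txt through Git LFS, so the repository itself only stores a small pointer while the Hub serves the actual file. Downloading through huggingface_hub resolves the pointer transparently; a sketch, with the repo id again hypothetical:

```python
# Sketch: hf_hub_download resolves LFS-tracked files to their real content.
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="Tflatval/nb-wav2vec2-300m-bokmaal-finetuned",  # hypothetical repo id
    filename="language_model/unigrams.txt",
)
with open(path, encoding="utf-8") as f:
    print(sum(1 for _ in f), "unigram entries")
```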
added_tokens.json ADDED
@@ -0,0 +1,4 @@
+ {
+ "</s>": 33,
+ "<s>": 32
+ }
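The two sentence-boundary tokens take ids 32 and 33 on top of the 32 base labels in alphabet.json below, which is what gives "vocab_size": 34 (with the CTC blank at "pad_token_id": 31) in the checkpoint configs. A quick check, assuming the uploaded repo also carries the usual tokenizer vocab.json:

```python
# Sketch: confirm where the added tokens land in the CTC vocabulary.
from transformers import Wav2Vec2CTCTokenizer

tokenizer = Wav2Vec2CTCTokenizer.from_pretrained(
    "Tflatval/nb-wav2vec2-300m-bokmaal-finetuned"  # hypothetical repo id
)
print(tokenizer.convert_tokens_to_ids(["<s>", "</s>"]))  # expected: [32, 33]
```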
alphabet.json ADDED
@@ -0,0 +1 @@
+ {"labels": [" ", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", "\u00e5", "\u00e6", "\u00f8", "\u2047", "", "<s>", "</s>"], "is_bpe": false}
checkpoint-3344/config.json ADDED
@@ -0,0 +1,116 @@
+ {
+ "_name_or_path": "NbAiLab/nb-wav2vec2-300m-bokmaal",
+ "activation_dropout": 0.055,
+ "adapter_attn_dim": null,
+ "adapter_kernel_size": 3,
+ "adapter_stride": 2,
+ "add_adapter": false,
+ "apply_spec_augment": true,
+ "architectures": [
+ "Wav2Vec2ForCTC"
+ ],
+ "attention_dropout": 0.094,
+ "bos_token_id": 1,
+ "classifier_proj_size": 256,
+ "codevector_dim": 768,
+ "contrastive_logits_temperature": 0.1,
+ "conv_bias": true,
+ "conv_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512
+ ],
+ "conv_kernel": [
+ 10,
+ 3,
+ 3,
+ 3,
+ 3,
+ 2,
+ 2
+ ],
+ "conv_stride": [
+ 5,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2
+ ],
+ "ctc_loss_reduction": "mean",
+ "ctc_zero_infinity": true,
+ "diversity_loss_weight": 0.1,
+ "do_stable_layer_norm": true,
+ "eos_token_id": 2,
+ "feat_extract_activation": "gelu",
+ "feat_extract_dropout": 0.0,
+ "feat_extract_norm": "layer",
+ "feat_proj_dropout": 0.04,
+ "feat_quantizer_dropout": 0.0,
+ "final_dropout": 0.0,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.047,
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 4096,
+ "layer_norm_eps": 1e-05,
+ "layerdrop": 0.041,
+ "mask_channel_length": 10,
+ "mask_channel_min_space": 1,
+ "mask_channel_other": 0.0,
+ "mask_channel_prob": 0.0,
+ "mask_channel_selection": "static",
+ "mask_feature_length": 64,
+ "mask_feature_min_masks": 0,
+ "mask_feature_prob": 0.25,
+ "mask_time_length": 10,
+ "mask_time_min_masks": 2,
+ "mask_time_min_space": 1,
+ "mask_time_other": 0.0,
+ "mask_time_prob": 0.082,
+ "mask_time_selection": "static",
+ "model_type": "wav2vec2",
+ "num_adapter_layers": 3,
+ "num_attention_heads": 16,
+ "num_codevector_groups": 2,
+ "num_codevectors_per_group": 320,
+ "num_conv_pos_embedding_groups": 16,
+ "num_conv_pos_embeddings": 128,
+ "num_feat_extract_layers": 7,
+ "num_hidden_layers": 24,
+ "num_negatives": 100,
+ "output_hidden_size": 1024,
+ "pad_token_id": 31,
+ "proj_codevector_dim": 768,
+ "tdnn_dilation": [
+ 1,
+ 2,
+ 3,
+ 1,
+ 1
+ ],
+ "tdnn_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 1500
+ ],
+ "tdnn_kernel": [
+ 5,
+ 3,
+ 3,
+ 1,
+ 1
+ ],
+ "torch_dtype": "float32",
+ "transformers_version": "4.38.1",
+ "use_weighted_layer_sum": false,
+ "vocab_size": 34,
+ "xvector_output_dim": 512
+ }
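The config describes a CTC fine-tune of NbAiLab/nb-wav2vec2-300m-bokmaal: 24 transformer layers, hidden size 1024, a 34-way output head, SpecAugment with mask_time_prob 0.082 and mask_feature_prob 0.25. A loading sketch against the checkpoint folder named in this commit:

```python
# Sketch: load the checkpoint this config describes and check the CTC head.
import torch
from transformers import Wav2Vec2ForCTC

model = Wav2Vec2ForCTC.from_pretrained("checkpoint-3344")  # assumed local path
assert model.config.vocab_size == 34

with torch.no_grad():
    logits = model(torch.zeros(1, 16000)).logits  # one second of silence at 16 kHz
print(logits.shape)  # (1, frames, 34)
```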
checkpoint-3344/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a49ebd1fbe915c75bc22fef6e8187c0457faa591865e0930c906d76125b301ec
+ size 1261946880
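For every LFS-tracked file, Git stores only this three-line pointer (spec version, SHA-256 of the blob, size in bytes); the ~1.2 GB of weights live in LFS storage. A sketch for checking a downloaded copy against the pointer:

```python
# Sketch: verify a downloaded model.safetensors against its LFS pointer fields.
import hashlib

EXPECTED_OID = "a49ebd1fbe915c75bc22fef6e8187c0457faa591865e0930c906d76125b301ec"
EXPECTED_SIZE = 1261946880

digest = hashlib.sha256()
size = 0
with open("checkpoint-3344/model.safetensors", "rb") as f:  # assumed local path
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE and digest.hexdigest() == EXPECTED_OID
```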
checkpoint-3344/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:254ef657e33211edcceebd1912e651944351ac5da63566f5a9206b22de9f395e
+ size 2490438582
checkpoint-3344/preprocessor_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "do_normalize": true,
+ "feature_extractor_type": "Wav2Vec2FeatureExtractor",
+ "feature_size": 1,
+ "padding_side": "right",
+ "padding_value": 0,
+ "processor_class": "Wav2Vec2ProcessorWithLM",
+ "return_attention_mask": true,
+ "sampling_rate": 16000
+ }
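"processor_class": "Wav2Vec2ProcessorWithLM" ties the 16 kHz feature extractor to LM-boosted decoding over alphabet.json and the language_model/ folder. An end-to-end inference sketch, assuming pyctcdecode and kenlm are installed and using a silent placeholder instead of real audio:

```python
# Sketch: feature extraction + LM-boosted decoding with the uploaded processor.
import numpy as np
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2ProcessorWithLM

repo = "Tflatval/nb-wav2vec2-300m-bokmaal-finetuned"  # hypothetical repo id
processor = Wav2Vec2ProcessorWithLM.from_pretrained(repo)
model = Wav2Vec2ForCTC.from_pretrained(repo)

speech = np.zeros(16000, dtype=np.float32)  # stand-in for real 16 kHz mono audio
inputs = processor(speech, sampling_rate=16000, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
print(processor.batch_decode(logits.numpy()).text)
```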
checkpoint-3344/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:061d68e20c4b1b4a0c9073acf26e9f42d085d61d775efbd3618d107b7d9eeb92
+ size 14244
checkpoint-3344/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:42cdc65d168946a328dd00ec312000a623bd4f580edbc3cb035177e261a78d07
+ size 1064
checkpoint-3344/trainer_state.json ADDED
@@ -0,0 +1,1313 @@
+ {
+ "best_metric": 0.17350260416666666,
+ "best_model_checkpoint": "/cluster/home/torstefl/Master/saved_model/W2V/single/NB/30.05/checkpoint-3344",
+ "epoch": 38.0,
+ "eval_steps": 500,
+ "global_step": 3344,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
11
+ {
12
+ "epoch": 1.0,
13
+ "grad_norm": 3.9170310497283936,
14
+ "learning_rate": 5.8e-06,
15
+ "loss": 0.9591,
16
+ "step": 88
17
+ },
18
+ {
19
+ "epoch": 1.0,
20
+ "eval_rundkast_loss": 0.5365053415298462,
21
+ "eval_rundkast_runtime": 35.6704,
22
+ "eval_rundkast_samples_per_second": 37.706,
23
+ "eval_rundkast_steps_per_second": 1.205,
24
+ "eval_rundkast_wer": 0.23027362392618517,
25
+ "step": 88
26
+ },
27
+ {
28
+ "epoch": 1.0,
29
+ "eval_nb_samtale_loss": 0.67843097448349,
30
+ "eval_nb_samtale_runtime": 39.2213,
31
+ "eval_nb_samtale_samples_per_second": 13.59,
32
+ "eval_nb_samtale_steps_per_second": 0.433,
33
+ "eval_nb_samtale_wer": 0.24666341145833334,
34
+ "step": 88
35
+ },
36
+ {
37
+ "epoch": 1.0,
38
+ "eval_bigbrother_loss": 2.74082612991333,
39
+ "eval_bigbrother_runtime": 41.8079,
40
+ "eval_bigbrother_samples_per_second": 32.769,
41
+ "eval_bigbrother_steps_per_second": 1.029,
42
+ "eval_bigbrother_wer": 0.6191089986304681,
43
+ "step": 88
44
+ },
45
+ {
46
+ "epoch": 2.0,
47
+ "grad_norm": 4.4212822914123535,
48
+ "learning_rate": 1.1666666666666668e-05,
49
+ "loss": 0.8123,
50
+ "step": 176
51
+ },
52
+ {
53
+ "epoch": 2.0,
54
+ "eval_rundkast_loss": 0.4252748191356659,
55
+ "eval_rundkast_runtime": 33.6441,
56
+ "eval_rundkast_samples_per_second": 39.977,
57
+ "eval_rundkast_steps_per_second": 1.278,
58
+ "eval_rundkast_wer": 0.2241489023226217,
59
+ "step": 176
60
+ },
61
+ {
62
+ "epoch": 2.0,
63
+ "eval_nb_samtale_loss": 0.5754267573356628,
64
+ "eval_nb_samtale_runtime": 38.5834,
65
+ "eval_nb_samtale_samples_per_second": 13.814,
66
+ "eval_nb_samtale_steps_per_second": 0.441,
67
+ "eval_nb_samtale_wer": 0.23527018229166666,
68
+ "step": 176
69
+ },
70
+ {
71
+ "epoch": 2.0,
72
+ "eval_bigbrother_loss": 2.240948438644409,
73
+ "eval_bigbrother_runtime": 41.5488,
74
+ "eval_bigbrother_samples_per_second": 32.973,
75
+ "eval_bigbrother_steps_per_second": 1.035,
76
+ "eval_bigbrother_wer": 0.6037219044550068,
77
+ "step": 176
78
+ },
79
+ {
80
+ "epoch": 3.0,
81
+ "grad_norm": 3.1233577728271484,
82
+ "learning_rate": 1.7533333333333334e-05,
83
+ "loss": 0.7517,
84
+ "step": 264
85
+ },
86
+ {
87
+ "epoch": 3.0,
88
+ "eval_rundkast_loss": 0.419950932264328,
89
+ "eval_rundkast_runtime": 33.5339,
90
+ "eval_rundkast_samples_per_second": 40.109,
91
+ "eval_rundkast_steps_per_second": 1.282,
92
+ "eval_rundkast_wer": 0.2205695195672924,
93
+ "step": 264
94
+ },
95
+ {
96
+ "epoch": 3.0,
97
+ "eval_nb_samtale_loss": 0.5531216263771057,
98
+ "eval_nb_samtale_runtime": 38.5893,
99
+ "eval_nb_samtale_samples_per_second": 13.812,
100
+ "eval_nb_samtale_steps_per_second": 0.441,
101
+ "eval_nb_samtale_wer": 0.22932942708333334,
102
+ "step": 264
103
+ },
104
+ {
105
+ "epoch": 3.0,
106
+ "eval_bigbrother_loss": 2.1344377994537354,
107
+ "eval_bigbrother_runtime": 41.2938,
108
+ "eval_bigbrother_samples_per_second": 33.177,
109
+ "eval_bigbrother_steps_per_second": 1.041,
110
+ "eval_bigbrother_wer": 0.5964714412309675,
111
+ "step": 264
112
+ },
113
+ {
114
+ "epoch": 4.0,
115
+ "grad_norm": 3.1834347248077393,
116
+ "learning_rate": 2.3400000000000003e-05,
117
+ "loss": 0.7074,
118
+ "step": 352
119
+ },
120
+ {
121
+ "epoch": 4.0,
122
+ "eval_rundkast_loss": 0.4088253676891327,
123
+ "eval_rundkast_runtime": 33.3755,
124
+ "eval_rundkast_samples_per_second": 40.299,
125
+ "eval_rundkast_steps_per_second": 1.288,
126
+ "eval_rundkast_wer": 0.22009226853324848,
127
+ "step": 352
128
+ },
129
+ {
130
+ "epoch": 4.0,
131
+ "eval_nb_samtale_loss": 0.5255401134490967,
132
+ "eval_nb_samtale_runtime": 38.2361,
133
+ "eval_nb_samtale_samples_per_second": 13.94,
134
+ "eval_nb_samtale_steps_per_second": 0.445,
135
+ "eval_nb_samtale_wer": 0.22696940104166666,
136
+ "step": 352
137
+ },
138
+ {
139
+ "epoch": 4.0,
140
+ "eval_bigbrother_loss": 2.036323070526123,
141
+ "eval_bigbrother_runtime": 41.2821,
142
+ "eval_bigbrother_samples_per_second": 33.186,
143
+ "eval_bigbrother_steps_per_second": 1.042,
144
+ "eval_bigbrother_wer": 0.5905099492467575,
145
+ "step": 352
146
+ },
147
+ {
148
+ "epoch": 5.0,
149
+ "grad_norm": 1.5792173147201538,
150
+ "learning_rate": 2.926666666666667e-05,
151
+ "loss": 0.6874,
152
+ "step": 440
153
+ },
154
+ {
155
+ "epoch": 5.0,
156
+ "eval_rundkast_loss": 0.40116986632347107,
157
+ "eval_rundkast_runtime": 33.3522,
158
+ "eval_rundkast_samples_per_second": 40.327,
159
+ "eval_rundkast_steps_per_second": 1.289,
160
+ "eval_rundkast_wer": 0.21985364301622654,
161
+ "step": 440
162
+ },
163
+ {
164
+ "epoch": 5.0,
165
+ "eval_nb_samtale_loss": 0.5132110714912415,
166
+ "eval_nb_samtale_runtime": 38.393,
167
+ "eval_nb_samtale_samples_per_second": 13.883,
168
+ "eval_nb_samtale_steps_per_second": 0.443,
169
+ "eval_nb_samtale_wer": 0.224365234375,
170
+ "step": 440
171
+ },
172
+ {
173
+ "epoch": 5.0,
174
+ "eval_bigbrother_loss": 1.9978336095809937,
175
+ "eval_bigbrother_runtime": 41.45,
176
+ "eval_bigbrother_samples_per_second": 33.052,
177
+ "eval_bigbrother_steps_per_second": 1.037,
178
+ "eval_bigbrother_wer": 0.5905099492467575,
179
+ "step": 440
180
+ },
181
+ {
182
+ "epoch": 6.0,
183
+ "grad_norm": 2.002620220184326,
184
+ "learning_rate": 3.513333333333334e-05,
185
+ "loss": 0.6548,
186
+ "step": 528
187
+ },
188
+ {
189
+ "epoch": 6.0,
190
+ "eval_rundkast_loss": 0.4032253921031952,
191
+ "eval_rundkast_runtime": 33.4235,
192
+ "eval_rundkast_samples_per_second": 40.241,
193
+ "eval_rundkast_steps_per_second": 1.287,
194
+ "eval_rundkast_wer": 0.217467387846007,
195
+ "step": 528
196
+ },
197
+ {
198
+ "epoch": 6.0,
199
+ "eval_nb_samtale_loss": 0.5047765374183655,
200
+ "eval_nb_samtale_runtime": 38.2135,
201
+ "eval_nb_samtale_samples_per_second": 13.948,
202
+ "eval_nb_samtale_steps_per_second": 0.445,
203
+ "eval_nb_samtale_wer": 0.220947265625,
204
+ "step": 528
205
+ },
206
+ {
207
+ "epoch": 6.0,
208
+ "eval_bigbrother_loss": 1.9670743942260742,
209
+ "eval_bigbrother_runtime": 41.4322,
210
+ "eval_bigbrother_samples_per_second": 33.066,
211
+ "eval_bigbrother_steps_per_second": 1.038,
212
+ "eval_bigbrother_wer": 0.5801176186256344,
213
+ "step": 528
214
+ },
215
+ {
216
+ "epoch": 7.0,
217
+ "grad_norm": 1.4368079900741577,
218
+ "learning_rate": 4.1e-05,
219
+ "loss": 0.6439,
220
+ "step": 616
221
+ },
222
+ {
223
+ "epoch": 7.0,
224
+ "eval_rundkast_loss": 0.39594346284866333,
225
+ "eval_rundkast_runtime": 33.337,
226
+ "eval_rundkast_samples_per_second": 40.346,
227
+ "eval_rundkast_steps_per_second": 1.29,
228
+ "eval_rundkast_wer": 0.22009226853324848,
229
+ "step": 616
230
+ },
231
+ {
232
+ "epoch": 7.0,
233
+ "eval_nb_samtale_loss": 0.48273980617523193,
234
+ "eval_nb_samtale_runtime": 38.0773,
235
+ "eval_nb_samtale_samples_per_second": 13.998,
236
+ "eval_nb_samtale_steps_per_second": 0.446,
237
+ "eval_nb_samtale_wer": 0.21964518229166666,
238
+ "step": 616
239
+ },
240
+ {
241
+ "epoch": 7.0,
242
+ "eval_bigbrother_loss": 1.921434998512268,
243
+ "eval_bigbrother_runtime": 41.5595,
244
+ "eval_bigbrother_samples_per_second": 32.965,
245
+ "eval_bigbrother_steps_per_second": 1.035,
246
+ "eval_bigbrother_wer": 0.5865624748247805,
247
+ "step": 616
248
+ },
249
+ {
250
+ "epoch": 8.0,
251
+ "grad_norm": 1.1675041913986206,
252
+ "learning_rate": 4.686666666666667e-05,
253
+ "loss": 0.6158,
254
+ "step": 704
255
+ },
256
+ {
257
+ "epoch": 8.0,
258
+ "eval_rundkast_loss": 0.40993189811706543,
259
+ "eval_rundkast_runtime": 33.4667,
260
+ "eval_rundkast_samples_per_second": 40.189,
261
+ "eval_rundkast_steps_per_second": 1.285,
262
+ "eval_rundkast_wer": 0.22128539611835826,
263
+ "step": 704
264
+ },
265
+ {
266
+ "epoch": 8.0,
267
+ "eval_nb_samtale_loss": 0.494111031293869,
268
+ "eval_nb_samtale_runtime": 38.0186,
269
+ "eval_nb_samtale_samples_per_second": 14.019,
270
+ "eval_nb_samtale_steps_per_second": 0.447,
271
+ "eval_nb_samtale_wer": 0.21712239583333334,
272
+ "step": 704
273
+ },
274
+ {
275
+ "epoch": 8.0,
276
+ "eval_bigbrother_loss": 1.9780834913253784,
277
+ "eval_bigbrother_runtime": 41.757,
278
+ "eval_bigbrother_samples_per_second": 32.809,
279
+ "eval_bigbrother_steps_per_second": 1.03,
280
+ "eval_bigbrother_wer": 0.5809232256505277,
281
+ "step": 704
282
+ },
283
+ {
284
+ "epoch": 9.0,
285
+ "grad_norm": 2.030735731124878,
286
+ "learning_rate": 5.273333333333333e-05,
287
+ "loss": 0.6041,
288
+ "step": 792
289
+ },
290
+ {
291
+ "epoch": 9.0,
292
+ "eval_rundkast_loss": 0.3950729966163635,
293
+ "eval_rundkast_runtime": 33.5538,
294
+ "eval_rundkast_samples_per_second": 40.085,
295
+ "eval_rundkast_steps_per_second": 1.282,
296
+ "eval_rundkast_wer": 0.21762647152402165,
297
+ "step": 792
298
+ },
299
+ {
300
+ "epoch": 9.0,
301
+ "eval_nb_samtale_loss": 0.45818740129470825,
302
+ "eval_nb_samtale_runtime": 37.9662,
303
+ "eval_nb_samtale_samples_per_second": 14.039,
304
+ "eval_nb_samtale_steps_per_second": 0.448,
305
+ "eval_nb_samtale_wer": 0.21183268229166666,
306
+ "step": 792
307
+ },
308
+ {
309
+ "epoch": 9.0,
310
+ "eval_bigbrother_loss": 1.8719202280044556,
311
+ "eval_bigbrother_runtime": 42.0553,
312
+ "eval_bigbrother_samples_per_second": 32.576,
313
+ "eval_bigbrother_steps_per_second": 1.022,
314
+ "eval_bigbrother_wer": 0.5723032304841699,
315
+ "step": 792
316
+ },
317
+ {
318
+ "epoch": 10.0,
319
+ "grad_norm": 1.995400071144104,
320
+ "learning_rate": 5.86e-05,
321
+ "loss": 0.586,
322
+ "step": 880
323
+ },
324
+ {
325
+ "epoch": 10.0,
326
+ "eval_rundkast_loss": 0.39550164341926575,
327
+ "eval_rundkast_runtime": 33.4182,
328
+ "eval_rundkast_samples_per_second": 40.248,
329
+ "eval_rundkast_steps_per_second": 1.287,
330
+ "eval_rundkast_wer": 0.21762647152402165,
331
+ "step": 880
332
+ },
333
+ {
334
+ "epoch": 10.0,
335
+ "eval_nb_samtale_loss": 0.46760231256484985,
336
+ "eval_nb_samtale_runtime": 38.1072,
337
+ "eval_nb_samtale_samples_per_second": 13.987,
338
+ "eval_nb_samtale_steps_per_second": 0.446,
339
+ "eval_nb_samtale_wer": 0.208251953125,
340
+ "step": 880
341
+ },
342
+ {
343
+ "epoch": 10.0,
344
+ "eval_bigbrother_loss": 1.8893344402313232,
345
+ "eval_bigbrother_runtime": 41.5152,
346
+ "eval_bigbrother_samples_per_second": 33.0,
347
+ "eval_bigbrother_steps_per_second": 1.036,
348
+ "eval_bigbrother_wer": 0.5765729477161041,
349
+ "step": 880
350
+ },
351
+ {
352
+ "epoch": 11.0,
353
+ "grad_norm": 1.3565304279327393,
354
+ "learning_rate": 6.446666666666667e-05,
355
+ "loss": 0.5779,
356
+ "step": 968
357
+ },
358
+ {
359
+ "epoch": 11.0,
360
+ "eval_rundkast_loss": 0.39460697770118713,
361
+ "eval_rundkast_runtime": 33.627,
362
+ "eval_rundkast_samples_per_second": 39.998,
363
+ "eval_rundkast_steps_per_second": 1.279,
364
+ "eval_rundkast_wer": 0.21810372255806554,
365
+ "step": 968
366
+ },
367
+ {
368
+ "epoch": 11.0,
369
+ "eval_nb_samtale_loss": 0.45650386810302734,
370
+ "eval_nb_samtale_runtime": 37.9631,
371
+ "eval_nb_samtale_samples_per_second": 14.04,
372
+ "eval_nb_samtale_steps_per_second": 0.448,
373
+ "eval_nb_samtale_wer": 0.20613606770833334,
374
+ "step": 968
375
+ },
376
+ {
377
+ "epoch": 11.0,
378
+ "eval_bigbrother_loss": 1.9142467975616455,
379
+ "eval_bigbrother_runtime": 41.4099,
380
+ "eval_bigbrother_samples_per_second": 33.084,
381
+ "eval_bigbrother_steps_per_second": 1.038,
382
+ "eval_bigbrother_wer": 0.5729477161040845,
383
+ "step": 968
384
+ },
385
+ {
386
+ "epoch": 12.0,
387
+ "grad_norm": 1.7810662984848022,
388
+ "learning_rate": 7.033333333333334e-05,
389
+ "loss": 0.5375,
390
+ "step": 1056
391
+ },
392
+ {
393
+ "epoch": 12.0,
394
+ "eval_rundkast_loss": 0.40589994192123413,
395
+ "eval_rundkast_runtime": 33.6087,
396
+ "eval_rundkast_samples_per_second": 40.019,
397
+ "eval_rundkast_steps_per_second": 1.279,
398
+ "eval_rundkast_wer": 0.2184218899140948,
399
+ "step": 1056
400
+ },
401
+ {
402
+ "epoch": 12.0,
403
+ "eval_nb_samtale_loss": 0.46365875005722046,
404
+ "eval_nb_samtale_runtime": 38.2581,
405
+ "eval_nb_samtale_samples_per_second": 13.932,
406
+ "eval_nb_samtale_steps_per_second": 0.444,
407
+ "eval_nb_samtale_wer": 0.201171875,
408
+ "step": 1056
409
+ },
410
+ {
411
+ "epoch": 12.0,
412
+ "eval_bigbrother_loss": 1.9760468006134033,
413
+ "eval_bigbrother_runtime": 42.201,
414
+ "eval_bigbrother_samples_per_second": 32.464,
415
+ "eval_bigbrother_steps_per_second": 1.019,
416
+ "eval_bigbrother_wer": 0.5706920164343833,
417
+ "step": 1056
418
+ },
419
+ {
420
+ "epoch": 13.0,
421
+ "grad_norm": 1.9388726949691772,
422
+ "learning_rate": 7.620000000000001e-05,
423
+ "loss": 0.5238,
424
+ "step": 1144
425
+ },
426
+ {
427
+ "epoch": 13.0,
428
+ "eval_rundkast_loss": 0.4222950339317322,
429
+ "eval_rundkast_runtime": 33.5989,
430
+ "eval_rundkast_samples_per_second": 40.031,
431
+ "eval_rundkast_steps_per_second": 1.28,
432
+ "eval_rundkast_wer": 0.21945593382118994,
433
+ "step": 1144
434
+ },
435
+ {
436
+ "epoch": 13.0,
437
+ "eval_nb_samtale_loss": 0.47446364164352417,
438
+ "eval_nb_samtale_runtime": 37.9325,
439
+ "eval_nb_samtale_samples_per_second": 14.051,
440
+ "eval_nb_samtale_steps_per_second": 0.448,
441
+ "eval_nb_samtale_wer": 0.20345052083333334,
442
+ "step": 1144
443
+ },
444
+ {
445
+ "epoch": 13.0,
446
+ "eval_bigbrother_loss": 1.956613540649414,
447
+ "eval_bigbrother_runtime": 41.6064,
448
+ "eval_bigbrother_samples_per_second": 32.928,
449
+ "eval_bigbrother_steps_per_second": 1.033,
450
+ "eval_bigbrother_wer": 0.564408281640216,
451
+ "step": 1144
452
+ },
453
+ {
454
+ "epoch": 14.0,
455
+ "grad_norm": 1.7794352769851685,
456
+ "learning_rate": 8.206666666666666e-05,
457
+ "loss": 0.5002,
458
+ "step": 1232
459
+ },
460
+ {
461
+ "epoch": 14.0,
462
+ "eval_rundkast_loss": 0.40369686484336853,
463
+ "eval_rundkast_runtime": 33.4225,
464
+ "eval_rundkast_samples_per_second": 40.242,
465
+ "eval_rundkast_steps_per_second": 1.287,
466
+ "eval_rundkast_wer": 0.2170696786509704,
467
+ "step": 1232
468
+ },
469
+ {
470
+ "epoch": 14.0,
471
+ "eval_nb_samtale_loss": 0.4533463418483734,
472
+ "eval_nb_samtale_runtime": 38.1602,
473
+ "eval_nb_samtale_samples_per_second": 13.967,
474
+ "eval_nb_samtale_steps_per_second": 0.445,
475
+ "eval_nb_samtale_wer": 0.19881184895833334,
476
+ "step": 1232
477
+ },
478
+ {
479
+ "epoch": 14.0,
480
+ "eval_bigbrother_loss": 1.9202650785446167,
481
+ "eval_bigbrother_runtime": 41.8647,
482
+ "eval_bigbrother_samples_per_second": 32.724,
483
+ "eval_bigbrother_steps_per_second": 1.027,
484
+ "eval_bigbrother_wer": 0.5650527672601305,
485
+ "step": 1232
486
+ },
487
+ {
488
+ "epoch": 15.0,
489
+ "grad_norm": 2.007899284362793,
490
+ "learning_rate": 8.793333333333333e-05,
491
+ "loss": 0.4939,
492
+ "step": 1320
493
+ },
494
+ {
495
+ "epoch": 15.0,
496
+ "eval_rundkast_loss": 0.39848214387893677,
497
+ "eval_rundkast_runtime": 33.4922,
498
+ "eval_rundkast_samples_per_second": 40.159,
499
+ "eval_rundkast_steps_per_second": 1.284,
500
+ "eval_rundkast_wer": 0.21794463888005092,
501
+ "step": 1320
502
+ },
503
+ {
504
+ "epoch": 15.0,
505
+ "eval_nb_samtale_loss": 0.43362265825271606,
506
+ "eval_nb_samtale_runtime": 37.9431,
507
+ "eval_nb_samtale_samples_per_second": 14.047,
508
+ "eval_nb_samtale_steps_per_second": 0.448,
509
+ "eval_nb_samtale_wer": 0.20182291666666666,
510
+ "step": 1320
511
+ },
512
+ {
513
+ "epoch": 15.0,
514
+ "eval_bigbrother_loss": 1.824406385421753,
515
+ "eval_bigbrother_runtime": 41.6014,
516
+ "eval_bigbrother_samples_per_second": 32.932,
517
+ "eval_bigbrother_steps_per_second": 1.034,
518
+ "eval_bigbrother_wer": 0.5605413679207283,
519
+ "step": 1320
520
+ },
521
+ {
522
+ "epoch": 16.0,
523
+ "grad_norm": 1.890428066253662,
524
+ "learning_rate": 9.38e-05,
525
+ "loss": 0.493,
526
+ "step": 1408
527
+ },
528
+ {
529
+ "epoch": 16.0,
530
+ "eval_rundkast_loss": 0.42038509249687195,
531
+ "eval_rundkast_runtime": 33.8742,
532
+ "eval_rundkast_samples_per_second": 39.706,
533
+ "eval_rundkast_steps_per_second": 1.269,
534
+ "eval_rundkast_wer": 0.22009226853324848,
535
+ "step": 1408
536
+ },
537
+ {
538
+ "epoch": 16.0,
539
+ "eval_nb_samtale_loss": 0.4756682515144348,
540
+ "eval_nb_samtale_runtime": 38.2223,
541
+ "eval_nb_samtale_samples_per_second": 13.945,
542
+ "eval_nb_samtale_steps_per_second": 0.445,
543
+ "eval_nb_samtale_wer": 0.20003255208333334,
544
+ "step": 1408
545
+ },
546
+ {
547
+ "epoch": 16.0,
548
+ "eval_bigbrother_loss": 2.017906665802002,
549
+ "eval_bigbrother_runtime": 41.7123,
550
+ "eval_bigbrother_samples_per_second": 32.844,
551
+ "eval_bigbrother_steps_per_second": 1.031,
552
+ "eval_bigbrother_wer": 0.5624748247804721,
553
+ "step": 1408
554
+ },
555
+ {
556
+ "epoch": 17.0,
557
+ "grad_norm": 1.8507376909255981,
558
+ "learning_rate": 9.966666666666667e-05,
559
+ "loss": 0.4734,
560
+ "step": 1496
561
+ },
562
+ {
563
+ "epoch": 17.0,
564
+ "eval_rundkast_loss": 0.4344000518321991,
565
+ "eval_rundkast_runtime": 34.0756,
566
+ "eval_rundkast_samples_per_second": 39.471,
567
+ "eval_rundkast_steps_per_second": 1.262,
568
+ "eval_rundkast_wer": 0.21699013681196308,
569
+ "step": 1496
570
+ },
571
+ {
572
+ "epoch": 17.0,
573
+ "eval_nb_samtale_loss": 0.4668411314487457,
574
+ "eval_nb_samtale_runtime": 37.9422,
575
+ "eval_nb_samtale_samples_per_second": 14.048,
576
+ "eval_nb_samtale_steps_per_second": 0.448,
577
+ "eval_nb_samtale_wer": 0.19694010416666666,
578
+ "step": 1496
579
+ },
580
+ {
581
+ "epoch": 17.0,
582
+ "eval_bigbrother_loss": 1.9858715534210205,
583
+ "eval_bigbrother_runtime": 41.6236,
584
+ "eval_bigbrother_samples_per_second": 32.914,
585
+ "eval_bigbrother_steps_per_second": 1.033,
586
+ "eval_bigbrother_wer": 0.5611052928381536,
587
+ "step": 1496
588
+ },
589
+ {
590
+ "epoch": 18.0,
591
+ "grad_norm": 1.6648415327072144,
592
+ "learning_rate": 9.589108910891089e-05,
593
+ "loss": 0.4444,
594
+ "step": 1584
595
+ },
596
+ {
597
+ "epoch": 18.0,
598
+ "eval_rundkast_loss": 0.459636926651001,
599
+ "eval_rundkast_runtime": 33.6234,
600
+ "eval_rundkast_samples_per_second": 40.002,
601
+ "eval_rundkast_steps_per_second": 1.279,
602
+ "eval_rundkast_wer": 0.2184218899140948,
603
+ "step": 1584
604
+ },
605
+ {
606
+ "epoch": 18.0,
607
+ "eval_nb_samtale_loss": 0.4778790771961212,
608
+ "eval_nb_samtale_runtime": 37.93,
609
+ "eval_nb_samtale_samples_per_second": 14.052,
610
+ "eval_nb_samtale_steps_per_second": 0.448,
611
+ "eval_nb_samtale_wer": 0.19482421875,
612
+ "step": 1584
613
+ },
614
+ {
615
+ "epoch": 18.0,
616
+ "eval_bigbrother_loss": 2.19545316696167,
617
+ "eval_bigbrother_runtime": 41.4756,
618
+ "eval_bigbrother_samples_per_second": 33.031,
619
+ "eval_bigbrother_steps_per_second": 1.037,
620
+ "eval_bigbrother_wer": 0.5647305244501732,
621
+ "step": 1584
622
+ },
623
+ {
624
+ "epoch": 19.0,
625
+ "grad_norm": 2.001049041748047,
626
+ "learning_rate": 9.153465346534654e-05,
627
+ "loss": 0.4535,
628
+ "step": 1672
629
+ },
630
+ {
631
+ "epoch": 19.0,
632
+ "eval_rundkast_loss": 0.4358086585998535,
633
+ "eval_rundkast_runtime": 33.8712,
634
+ "eval_rundkast_samples_per_second": 39.709,
635
+ "eval_rundkast_steps_per_second": 1.27,
636
+ "eval_rundkast_wer": 0.21317212853961184,
637
+ "step": 1672
638
+ },
639
+ {
640
+ "epoch": 19.0,
641
+ "eval_nb_samtale_loss": 0.4770139157772064,
642
+ "eval_nb_samtale_runtime": 38.0225,
643
+ "eval_nb_samtale_samples_per_second": 14.018,
644
+ "eval_nb_samtale_steps_per_second": 0.447,
645
+ "eval_nb_samtale_wer": 0.19514973958333334,
646
+ "step": 1672
647
+ },
648
+ {
649
+ "epoch": 19.0,
650
+ "eval_bigbrother_loss": 2.057053804397583,
651
+ "eval_bigbrother_runtime": 41.9595,
652
+ "eval_bigbrother_samples_per_second": 32.651,
653
+ "eval_bigbrother_steps_per_second": 1.025,
654
+ "eval_bigbrother_wer": 0.5624748247804721,
655
+ "step": 1672
656
+ },
657
+ {
658
+ "epoch": 20.0,
659
+ "grad_norm": 1.6764415502548218,
660
+ "learning_rate": 8.717821782178219e-05,
661
+ "loss": 0.4231,
662
+ "step": 1760
663
+ },
664
+ {
665
+ "epoch": 20.0,
666
+ "eval_rundkast_loss": 0.41307970881462097,
667
+ "eval_rundkast_runtime": 34.1234,
668
+ "eval_rundkast_samples_per_second": 39.416,
669
+ "eval_rundkast_steps_per_second": 1.26,
670
+ "eval_rundkast_wer": 0.21603563474387527,
671
+ "step": 1760
672
+ },
673
+ {
674
+ "epoch": 20.0,
675
+ "eval_nb_samtale_loss": 0.43975648283958435,
676
+ "eval_nb_samtale_runtime": 37.7496,
677
+ "eval_nb_samtale_samples_per_second": 14.119,
678
+ "eval_nb_samtale_steps_per_second": 0.45,
679
+ "eval_nb_samtale_wer": 0.18717447916666666,
680
+ "step": 1760
681
+ },
682
+ {
683
+ "epoch": 20.0,
684
+ "eval_bigbrother_loss": 1.9533370733261108,
685
+ "eval_bigbrother_runtime": 41.7155,
686
+ "eval_bigbrother_samples_per_second": 32.842,
687
+ "eval_bigbrother_steps_per_second": 1.031,
688
+ "eval_bigbrother_wer": 0.5524852976717957,
689
+ "step": 1760
690
+ },
691
+ {
692
+ "epoch": 21.0,
693
+ "grad_norm": 1.599152684211731,
694
+ "learning_rate": 8.282178217821782e-05,
695
+ "loss": 0.4086,
696
+ "step": 1848
697
+ },
698
+ {
699
+ "epoch": 21.0,
700
+ "eval_rundkast_loss": 0.4404522776603699,
701
+ "eval_rundkast_runtime": 33.7971,
702
+ "eval_rundkast_samples_per_second": 39.796,
703
+ "eval_rundkast_steps_per_second": 1.272,
704
+ "eval_rundkast_wer": 0.2138880050906777,
705
+ "step": 1848
706
+ },
707
+ {
708
+ "epoch": 21.0,
709
+ "eval_nb_samtale_loss": 0.4675068259239197,
710
+ "eval_nb_samtale_runtime": 37.8694,
711
+ "eval_nb_samtale_samples_per_second": 14.075,
712
+ "eval_nb_samtale_steps_per_second": 0.449,
713
+ "eval_nb_samtale_wer": 0.19132486979166666,
714
+ "step": 1848
715
+ },
716
+ {
717
+ "epoch": 21.0,
718
+ "eval_bigbrother_loss": 2.153977632522583,
719
+ "eval_bigbrother_runtime": 41.6263,
720
+ "eval_bigbrother_samples_per_second": 32.912,
721
+ "eval_bigbrother_steps_per_second": 1.033,
722
+ "eval_bigbrother_wer": 0.5508740836220092,
723
+ "step": 1848
724
+ },
725
+ {
726
+ "epoch": 22.0,
727
+ "grad_norm": 2.0951850414276123,
728
+ "learning_rate": 7.846534653465347e-05,
729
+ "loss": 0.3963,
730
+ "step": 1936
731
+ },
732
+ {
733
+ "epoch": 22.0,
734
+ "eval_rundkast_loss": 0.455175518989563,
735
+ "eval_rundkast_runtime": 33.5764,
736
+ "eval_rundkast_samples_per_second": 40.058,
737
+ "eval_rundkast_steps_per_second": 1.281,
738
+ "eval_rundkast_wer": 0.21643334393891187,
739
+ "step": 1936
740
+ },
741
+ {
742
+ "epoch": 22.0,
743
+ "eval_nb_samtale_loss": 0.4630807340145111,
744
+ "eval_nb_samtale_runtime": 37.7668,
745
+ "eval_nb_samtale_samples_per_second": 14.113,
746
+ "eval_nb_samtale_steps_per_second": 0.45,
747
+ "eval_nb_samtale_wer": 0.18977864583333334,
748
+ "step": 1936
749
+ },
750
+ {
751
+ "epoch": 22.0,
752
+ "eval_bigbrother_loss": 2.1877636909484863,
753
+ "eval_bigbrother_runtime": 41.6656,
754
+ "eval_bigbrother_samples_per_second": 32.881,
755
+ "eval_bigbrother_steps_per_second": 1.032,
756
+ "eval_bigbrother_wer": 0.5527269797792637,
757
+ "step": 1936
758
+ },
759
+ {
760
+ "epoch": 23.0,
761
+ "grad_norm": 1.5935795307159424,
762
+ "learning_rate": 7.410891089108911e-05,
763
+ "loss": 0.3858,
764
+ "step": 2024
765
+ },
766
+ {
767
+ "epoch": 23.0,
768
+ "eval_rundkast_loss": 0.46708443760871887,
769
+ "eval_rundkast_runtime": 33.6671,
770
+ "eval_rundkast_samples_per_second": 39.95,
771
+ "eval_rundkast_steps_per_second": 1.277,
772
+ "eval_rundkast_wer": 0.2152402163538021,
773
+ "step": 2024
774
+ },
775
+ {
776
+ "epoch": 23.0,
777
+ "eval_nb_samtale_loss": 0.4683707356452942,
778
+ "eval_nb_samtale_runtime": 37.9855,
779
+ "eval_nb_samtale_samples_per_second": 14.032,
780
+ "eval_nb_samtale_steps_per_second": 0.448,
781
+ "eval_nb_samtale_wer": 0.185791015625,
782
+ "step": 2024
783
+ },
784
+ {
785
+ "epoch": 23.0,
786
+ "eval_bigbrother_loss": 2.148423910140991,
787
+ "eval_bigbrother_runtime": 41.6837,
788
+ "eval_bigbrother_samples_per_second": 32.867,
789
+ "eval_bigbrother_steps_per_second": 1.032,
790
+ "eval_bigbrother_wer": 0.5511157657294772,
791
+ "step": 2024
792
+ },
793
+ {
794
+ "epoch": 24.0,
795
+ "grad_norm": 2.292874813079834,
796
+ "learning_rate": 6.975247524752476e-05,
797
+ "loss": 0.3763,
798
+ "step": 2112
799
+ },
800
+ {
801
+ "epoch": 24.0,
802
+ "eval_rundkast_loss": 0.47619959712028503,
803
+ "eval_rundkast_runtime": 33.4679,
804
+ "eval_rundkast_samples_per_second": 40.188,
805
+ "eval_rundkast_steps_per_second": 1.285,
806
+ "eval_rundkast_wer": 0.21810372255806554,
807
+ "step": 2112
808
+ },
809
+ {
810
+ "epoch": 24.0,
811
+ "eval_nb_samtale_loss": 0.47193431854248047,
812
+ "eval_nb_samtale_runtime": 37.7044,
813
+ "eval_nb_samtale_samples_per_second": 14.136,
814
+ "eval_nb_samtale_steps_per_second": 0.451,
815
+ "eval_nb_samtale_wer": 0.18709309895833334,
816
+ "step": 2112
817
+ },
818
+ {
819
+ "epoch": 24.0,
820
+ "eval_bigbrother_loss": 2.1468334197998047,
821
+ "eval_bigbrother_runtime": 41.3571,
822
+ "eval_bigbrother_samples_per_second": 33.126,
823
+ "eval_bigbrother_steps_per_second": 1.04,
824
+ "eval_bigbrother_wer": 0.559735760895835,
825
+ "step": 2112
826
+ },
827
+ {
828
+ "epoch": 25.0,
829
+ "grad_norm": 2.466032028198242,
830
+ "learning_rate": 6.53960396039604e-05,
831
+ "loss": 0.3741,
832
+ "step": 2200
833
+ },
834
+ {
835
+ "epoch": 25.0,
836
+ "eval_rundkast_loss": 0.4504246711730957,
837
+ "eval_rundkast_runtime": 33.5467,
838
+ "eval_rundkast_samples_per_second": 40.093,
839
+ "eval_rundkast_steps_per_second": 1.282,
840
+ "eval_rundkast_wer": 0.2134107540566338,
841
+ "step": 2200
842
+ },
843
+ {
844
+ "epoch": 25.0,
845
+ "eval_nb_samtale_loss": 0.4489670693874359,
846
+ "eval_nb_samtale_runtime": 37.6649,
847
+ "eval_nb_samtale_samples_per_second": 14.151,
848
+ "eval_nb_samtale_steps_per_second": 0.451,
849
+ "eval_nb_samtale_wer": 0.185546875,
850
+ "step": 2200
851
+ },
852
+ {
853
+ "epoch": 25.0,
854
+ "eval_bigbrother_loss": 2.1423308849334717,
855
+ "eval_bigbrother_runtime": 41.5117,
856
+ "eval_bigbrother_samples_per_second": 33.003,
857
+ "eval_bigbrother_steps_per_second": 1.036,
858
+ "eval_bigbrother_wer": 0.553290904696689,
859
+ "step": 2200
860
+ },
861
+ {
862
+ "epoch": 26.0,
863
+ "grad_norm": 1.5329217910766602,
864
+ "learning_rate": 6.103960396039604e-05,
865
+ "loss": 0.3723,
866
+ "step": 2288
867
+ },
868
+ {
869
+ "epoch": 26.0,
870
+ "eval_rundkast_loss": 0.4836730659008026,
871
+ "eval_rundkast_runtime": 33.6087,
872
+ "eval_rundkast_samples_per_second": 40.019,
873
+ "eval_rundkast_steps_per_second": 1.279,
874
+ "eval_rundkast_wer": 0.2152402163538021,
875
+ "step": 2288
876
+ },
877
+ {
878
+ "epoch": 26.0,
879
+ "eval_nb_samtale_loss": 0.4578171968460083,
880
+ "eval_nb_samtale_runtime": 37.9306,
881
+ "eval_nb_samtale_samples_per_second": 14.052,
882
+ "eval_nb_samtale_steps_per_second": 0.448,
883
+ "eval_nb_samtale_wer": 0.18424479166666666,
884
+ "step": 2288
885
+ },
886
+ {
887
+ "epoch": 26.0,
888
+ "eval_bigbrother_loss": 2.1947550773620605,
889
+ "eval_bigbrother_runtime": 41.8164,
890
+ "eval_bigbrother_samples_per_second": 32.762,
891
+ "eval_bigbrother_steps_per_second": 1.028,
892
+ "eval_bigbrother_wer": 0.549101748167244,
893
+ "step": 2288
894
+ },
895
+ {
896
+ "epoch": 27.0,
897
+ "grad_norm": 1.3041456937789917,
898
+ "learning_rate": 5.668316831683168e-05,
899
+ "loss": 0.3684,
900
+ "step": 2376
901
+ },
902
+ {
903
+ "epoch": 27.0,
904
+ "eval_rundkast_loss": 0.4755971133708954,
905
+ "eval_rundkast_runtime": 33.7598,
906
+ "eval_rundkast_samples_per_second": 39.84,
907
+ "eval_rundkast_steps_per_second": 1.274,
908
+ "eval_rundkast_wer": 0.21197900095450206,
909
+ "step": 2376
910
+ },
911
+ {
912
+ "epoch": 27.0,
913
+ "eval_nb_samtale_loss": 0.4704474210739136,
914
+ "eval_nb_samtale_runtime": 37.5334,
915
+ "eval_nb_samtale_samples_per_second": 14.201,
916
+ "eval_nb_samtale_steps_per_second": 0.453,
917
+ "eval_nb_samtale_wer": 0.180419921875,
918
+ "step": 2376
919
+ },
920
+ {
921
+ "epoch": 27.0,
922
+ "eval_bigbrother_loss": 2.2346787452697754,
923
+ "eval_bigbrother_runtime": 41.523,
924
+ "eval_bigbrother_samples_per_second": 32.994,
925
+ "eval_bigbrother_steps_per_second": 1.036,
926
+ "eval_bigbrother_wer": 0.549101748167244,
927
+ "step": 2376
928
+ },
929
+ {
930
+ "epoch": 28.0,
931
+ "grad_norm": 1.4035800695419312,
932
+ "learning_rate": 5.232673267326733e-05,
933
+ "loss": 0.3536,
934
+ "step": 2464
935
+ },
936
+ {
937
+ "epoch": 28.0,
938
+ "eval_rundkast_loss": 0.4591149687767029,
939
+ "eval_rundkast_runtime": 33.8672,
940
+ "eval_rundkast_samples_per_second": 39.714,
941
+ "eval_rundkast_steps_per_second": 1.27,
942
+ "eval_rundkast_wer": 0.2085587018771874,
943
+ "step": 2464
944
+ },
945
+ {
946
+ "epoch": 28.0,
947
+ "eval_nb_samtale_loss": 0.4490753710269928,
948
+ "eval_nb_samtale_runtime": 37.5138,
949
+ "eval_nb_samtale_samples_per_second": 14.208,
950
+ "eval_nb_samtale_steps_per_second": 0.453,
951
+ "eval_nb_samtale_wer": 0.18074544270833334,
952
+ "step": 2464
953
+ },
954
+ {
955
+ "epoch": 28.0,
956
+ "eval_bigbrother_loss": 2.1479501724243164,
957
+ "eval_bigbrother_runtime": 41.5368,
958
+ "eval_bigbrother_samples_per_second": 32.983,
959
+ "eval_bigbrother_steps_per_second": 1.035,
960
+ "eval_bigbrother_wer": 0.5502295980020946,
961
+ "step": 2464
962
+ },
963
+ {
964
+ "epoch": 29.0,
965
+ "grad_norm": 2.3787569999694824,
966
+ "learning_rate": 4.797029702970297e-05,
967
+ "loss": 0.3422,
968
+ "step": 2552
969
+ },
970
+ {
971
+ "epoch": 29.0,
972
+ "eval_rundkast_loss": 0.45249199867248535,
973
+ "eval_rundkast_runtime": 33.8412,
974
+ "eval_rundkast_samples_per_second": 39.745,
975
+ "eval_rundkast_steps_per_second": 1.271,
976
+ "eval_rundkast_wer": 0.2116608335984728,
977
+ "step": 2552
978
+ },
979
+ {
980
+ "epoch": 29.0,
981
+ "eval_nb_samtale_loss": 0.42878830432891846,
982
+ "eval_nb_samtale_runtime": 37.8412,
983
+ "eval_nb_samtale_samples_per_second": 14.085,
984
+ "eval_nb_samtale_steps_per_second": 0.449,
985
+ "eval_nb_samtale_wer": 0.179931640625,
986
+ "step": 2552
987
+ },
988
+ {
989
+ "epoch": 29.0,
990
+ "eval_bigbrother_loss": 2.074361562728882,
991
+ "eval_bigbrother_runtime": 41.584,
992
+ "eval_bigbrother_samples_per_second": 32.945,
993
+ "eval_bigbrother_steps_per_second": 1.034,
994
+ "eval_bigbrother_wer": 0.5470877306050109,
995
+ "step": 2552
996
+ },
997
+ {
998
+ "epoch": 30.0,
999
+ "grad_norm": 1.0397918224334717,
1000
+ "learning_rate": 4.3613861386138617e-05,
1001
+ "loss": 0.3271,
1002
+ "step": 2640
1003
+ },
1004
+ {
1005
+ "epoch": 30.0,
1006
+ "eval_rundkast_loss": 0.4873400926589966,
1007
+ "eval_rundkast_runtime": 33.7561,
1008
+ "eval_rundkast_samples_per_second": 39.845,
1009
+ "eval_rundkast_steps_per_second": 1.274,
1010
+ "eval_rundkast_wer": 0.21174037543748012,
1011
+ "step": 2640
1012
+ },
1013
+ {
1014
+ "epoch": 30.0,
1015
+ "eval_nb_samtale_loss": 0.4706071615219116,
1016
+ "eval_nb_samtale_runtime": 37.8498,
1017
+ "eval_nb_samtale_samples_per_second": 14.082,
1018
+ "eval_nb_samtale_steps_per_second": 0.449,
1019
+ "eval_nb_samtale_wer": 0.17960611979166666,
1020
+ "step": 2640
1021
+ },
1022
+ {
1023
+ "epoch": 30.0,
1024
+ "eval_bigbrother_loss": 2.284547805786133,
1025
+ "eval_bigbrother_runtime": 43.2872,
1026
+ "eval_bigbrother_samples_per_second": 31.649,
1027
+ "eval_bigbrother_steps_per_second": 0.993,
1028
+ "eval_bigbrother_wer": 0.5479738983323934,
1029
+ "step": 2640
1030
+ },
1031
+ {
1032
+ "epoch": 31.0,
1033
+ "grad_norm": 1.832560420036316,
1034
+ "learning_rate": 3.925742574257426e-05,
1035
+ "loss": 0.3351,
1036
+ "step": 2728
1037
+ },
1038
+ {
1039
+ "epoch": 31.0,
1040
+ "eval_rundkast_loss": 0.4675346612930298,
1041
+ "eval_rundkast_runtime": 33.6595,
1042
+ "eval_rundkast_samples_per_second": 39.959,
1043
+ "eval_rundkast_steps_per_second": 1.277,
1044
+ "eval_rundkast_wer": 0.21094495704740693,
1045
+ "step": 2728
1046
+ },
1047
+ {
1048
+ "epoch": 31.0,
1049
+ "eval_nb_samtale_loss": 0.44606560468673706,
1050
+ "eval_nb_samtale_runtime": 37.8399,
1051
+ "eval_nb_samtale_samples_per_second": 14.086,
1052
+ "eval_nb_samtale_steps_per_second": 0.449,
1053
+ "eval_nb_samtale_wer": 0.1767578125,
1054
+ "step": 2728
1055
+ },
1056
+ {
1057
+ "epoch": 31.0,
1058
+ "eval_bigbrother_loss": 2.2436790466308594,
1059
+ "eval_bigbrother_runtime": 41.8909,
1060
+ "eval_bigbrother_samples_per_second": 32.704,
1061
+ "eval_bigbrother_steps_per_second": 1.026,
1062
+ "eval_bigbrother_wer": 0.5497462337871586,
1063
+ "step": 2728
1064
+ },
1065
+ {
1066
+ "epoch": 32.0,
1067
+ "grad_norm": 2.598604917526245,
1068
+ "learning_rate": 3.49009900990099e-05,
1069
+ "loss": 0.3266,
1070
+ "step": 2816
1071
+ },
1072
+ {
1073
+ "epoch": 32.0,
1074
+ "eval_rundkast_loss": 0.47241419553756714,
1075
+ "eval_rundkast_runtime": 33.6802,
1076
+ "eval_rundkast_samples_per_second": 39.934,
1077
+ "eval_rundkast_steps_per_second": 1.277,
1078
+ "eval_rundkast_wer": 0.2099109131403118,
1079
+ "step": 2816
1080
+ },
1081
+ {
1082
+ "epoch": 32.0,
1083
+ "eval_nb_samtale_loss": 0.46288925409317017,
1084
+ "eval_nb_samtale_runtime": 37.3999,
1085
+ "eval_nb_samtale_samples_per_second": 14.251,
1086
+ "eval_nb_samtale_steps_per_second": 0.455,
1087
+ "eval_nb_samtale_wer": 0.17952473958333334,
1088
+ "step": 2816
1089
+ },
1090
+ {
1091
+ "epoch": 32.0,
1092
+ "eval_bigbrother_loss": 2.2838997840881348,
1093
+ "eval_bigbrother_runtime": 41.6372,
1094
+ "eval_bigbrother_samples_per_second": 32.903,
1095
+ "eval_bigbrother_steps_per_second": 1.033,
1096
+ "eval_bigbrother_wer": 0.5497462337871586,
1097
+ "step": 2816
1098
+ },
1099
+ {
1100
+ "epoch": 33.0,
1101
+ "grad_norm": 2.8319473266601562,
1102
+ "learning_rate": 3.054455445544554e-05,
1103
+ "loss": 0.3164,
1104
+ "step": 2904
1105
+ },
1106
+ {
1107
+ "epoch": 33.0,
1108
+ "eval_rundkast_loss": 0.49051862955093384,
1109
+ "eval_rundkast_runtime": 33.5657,
1110
+ "eval_rundkast_samples_per_second": 40.071,
1111
+ "eval_rundkast_steps_per_second": 1.281,
1112
+ "eval_rundkast_wer": 0.21102449888641425,
1113
+ "step": 2904
1114
+ },
1115
+ {
1116
+ "epoch": 33.0,
1117
+ "eval_nb_samtale_loss": 0.47550272941589355,
1118
+ "eval_nb_samtale_runtime": 37.719,
1119
+ "eval_nb_samtale_samples_per_second": 14.131,
1120
+ "eval_nb_samtale_steps_per_second": 0.451,
1121
+ "eval_nb_samtale_wer": 0.17708333333333334,
1122
+ "step": 2904
1123
+ },
1124
+ {
1125
+ "epoch": 33.0,
1126
+ "eval_bigbrother_loss": 2.360140085220337,
1127
+ "eval_bigbrother_runtime": 41.7548,
1128
+ "eval_bigbrother_samples_per_second": 32.811,
1129
+ "eval_bigbrother_steps_per_second": 1.03,
1130
+ "eval_bigbrother_wer": 0.549343430274712,
1131
+ "step": 2904
1132
+ },
1133
+ {
1134
+ "epoch": 34.0,
1135
+ "grad_norm": 1.165239930152893,
1136
+ "learning_rate": 2.6188118811881192e-05,
1137
+ "loss": 0.325,
1138
+ "step": 2992
1139
+ },
1140
+ {
1141
+ "epoch": 34.0,
1142
+ "eval_rundkast_loss": 0.4772399365901947,
1143
+ "eval_rundkast_runtime": 34.1144,
1144
+ "eval_rundkast_samples_per_second": 39.426,
1145
+ "eval_rundkast_steps_per_second": 1.26,
1146
+ "eval_rundkast_wer": 0.21142220808145085,
1147
+ "step": 2992
1148
+ },
1149
+ {
1150
+ "epoch": 34.0,
1151
+ "eval_nb_samtale_loss": 0.45948928594589233,
1152
+ "eval_nb_samtale_runtime": 37.8345,
1153
+ "eval_nb_samtale_samples_per_second": 14.088,
1154
+ "eval_nb_samtale_steps_per_second": 0.449,
1155
+ "eval_nb_samtale_wer": 0.17692057291666666,
1156
+ "step": 2992
1157
+ },
1158
+ {
1159
+ "epoch": 34.0,
1160
+ "eval_bigbrother_loss": 2.2873830795288086,
1161
+ "eval_bigbrother_runtime": 41.8269,
1162
+ "eval_bigbrother_samples_per_second": 32.754,
1163
+ "eval_bigbrother_steps_per_second": 1.028,
1164
+ "eval_bigbrother_wer": 0.5465238056875856,
1165
+ "step": 2992
1166
+ },
1167
+ {
1168
+ "epoch": 35.0,
1169
+ "grad_norm": 1.3796650171279907,
1170
+ "learning_rate": 2.1831683168316834e-05,
1171
+ "loss": 0.3195,
1172
+ "step": 3080
1173
+ },
1174
+ {
1175
+ "epoch": 35.0,
1176
+ "eval_rundkast_loss": 0.48370474576950073,
1177
+ "eval_rundkast_runtime": 33.7199,
1178
+ "eval_rundkast_samples_per_second": 39.887,
1179
+ "eval_rundkast_steps_per_second": 1.275,
1180
+ "eval_rundkast_wer": 0.21006999681832644,
1181
+ "step": 3080
1182
+ },
1183
+ {
1184
+ "epoch": 35.0,
1185
+ "eval_nb_samtale_loss": 0.4662381708621979,
1186
+ "eval_nb_samtale_runtime": 38.5158,
1187
+ "eval_nb_samtale_samples_per_second": 13.838,
1188
+ "eval_nb_samtale_steps_per_second": 0.441,
1189
+ "eval_nb_samtale_wer": 0.17635091145833334,
1190
+ "step": 3080
1191
+ },
1192
+ {
1193
+ "epoch": 35.0,
1194
+ "eval_bigbrother_loss": 2.249934434890747,
1195
+ "eval_bigbrother_runtime": 41.5219,
1196
+ "eval_bigbrother_samples_per_second": 32.995,
1197
+ "eval_bigbrother_steps_per_second": 1.036,
1198
+ "eval_bigbrother_wer": 0.5461210021751389,
1199
+ "step": 3080
1200
+ },
1201
+ {
1202
+ "epoch": 36.0,
1203
+ "grad_norm": 2.232914686203003,
1204
+ "learning_rate": 1.7475247524752476e-05,
1205
+ "loss": 0.3169,
1206
+ "step": 3168
1207
+ },
1208
+ {
1209
+ "epoch": 36.0,
1210
+ "eval_rundkast_loss": 0.4873496890068054,
1211
+ "eval_rundkast_runtime": 33.9486,
1212
+ "eval_rundkast_samples_per_second": 39.619,
1213
+ "eval_rundkast_steps_per_second": 1.267,
1214
+ "eval_rundkast_wer": 0.20935412026726058,
1215
+ "step": 3168
1216
+ },
1217
+ {
1218
+ "epoch": 36.0,
1219
+ "eval_nb_samtale_loss": 0.4695265591144562,
1220
+ "eval_nb_samtale_runtime": 37.5912,
1221
+ "eval_nb_samtale_samples_per_second": 14.179,
1222
+ "eval_nb_samtale_steps_per_second": 0.452,
1223
+ "eval_nb_samtale_wer": 0.175048828125,
1224
+ "step": 3168
1225
+ },
1226
+ {
1227
+ "epoch": 36.0,
1228
+ "eval_bigbrother_loss": 2.2812929153442383,
1229
+ "eval_bigbrother_runtime": 42.1676,
1230
+ "eval_bigbrother_samples_per_second": 32.489,
1231
+ "eval_bigbrother_steps_per_second": 1.02,
1232
+ "eval_bigbrother_wer": 0.545879320067671,
1233
+ "step": 3168
1234
+ },
1235
+ {
1236
+ "epoch": 37.0,
1237
+ "grad_norm": 1.227518916130066,
1238
+ "learning_rate": 1.311881188118812e-05,
1239
+ "loss": 0.3205,
1240
+ "step": 3256
1241
+ },
1242
+ {
1243
+ "epoch": 37.0,
1244
+ "eval_rundkast_loss": 0.4848962128162384,
1245
+ "eval_rundkast_runtime": 34.1615,
1246
+ "eval_rundkast_samples_per_second": 39.372,
1247
+ "eval_rundkast_steps_per_second": 1.259,
1248
+ "eval_rundkast_wer": 0.21046770601336304,
1249
+ "step": 3256
1250
+ },
1251
+ {
1252
+ "epoch": 37.0,
1253
+ "eval_nb_samtale_loss": 0.45955517888069153,
1254
+ "eval_nb_samtale_runtime": 37.7962,
1255
+ "eval_nb_samtale_samples_per_second": 14.102,
1256
+ "eval_nb_samtale_steps_per_second": 0.45,
1257
+ "eval_nb_samtale_wer": 0.17521158854166666,
1258
+ "step": 3256
1259
+ },
1260
+ {
1261
+ "epoch": 37.0,
1262
+ "eval_bigbrother_loss": 2.2612295150756836,
1263
+ "eval_bigbrother_runtime": 41.8835,
1264
+ "eval_bigbrother_samples_per_second": 32.71,
1265
+ "eval_bigbrother_steps_per_second": 1.027,
1266
+ "eval_bigbrother_wer": 0.5478127769274148,
1267
+ "step": 3256
1268
+ },
1269
+ {
1270
+ "epoch": 38.0,
1271
+ "grad_norm": 1.3873769044876099,
1272
+ "learning_rate": 8.762376237623762e-06,
1273
+ "loss": 0.3164,
1274
+ "step": 3344
1275
+ },
1276
+ {
1277
+ "epoch": 38.0,
1278
+ "eval_rundkast_loss": 0.4849892556667328,
1279
+ "eval_rundkast_runtime": 33.7856,
1280
+ "eval_rundkast_samples_per_second": 39.81,
1281
+ "eval_rundkast_steps_per_second": 1.273,
1282
+ "eval_rundkast_wer": 0.20999045497931912,
1283
+ "step": 3344
1284
+ },
1285
+ {
1286
+ "epoch": 38.0,
1287
+ "eval_nb_samtale_loss": 0.45760098099708557,
1288
+ "eval_nb_samtale_runtime": 37.6736,
1289
+ "eval_nb_samtale_samples_per_second": 14.148,
1290
+ "eval_nb_samtale_steps_per_second": 0.451,
1291
+ "eval_nb_samtale_wer": 0.17350260416666666,
1292
+ "step": 3344
1293
+ },
1294
+ {
1295
+ "epoch": 38.0,
1296
+ "eval_bigbrother_loss": 2.2668609619140625,
1297
+ "eval_bigbrother_runtime": 42.1887,
1298
+ "eval_bigbrother_samples_per_second": 32.473,
1299
+ "eval_bigbrother_steps_per_second": 1.019,
1300
+ "eval_bigbrother_wer": 0.5437041811004591,
1301
+ "step": 3344
1302
+ }
+ ],
+ "logging_steps": 500,
+ "max_steps": 3520,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 40,
+ "save_steps": 500,
+ "total_flos": 4.566788837469744e+19,
+ "train_batch_size": 48,
+ "trial_name": null,
+ "trial_params": null
+ }
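trainer_state.json holds the full history for the run: one training entry per epoch (loss, grad_norm, learning rate) and one entry per evaluation set (rundkast, nb_samtale, bigbrother) with loss and WER; the best_metric of 0.1735 is the nb_samtale WER reached at step 3344. A sketch for pulling the WER curves back out:

```python
# Sketch: extract per-epoch WER for each evaluation set from trainer_state.json.
import json

with open("checkpoint-3344/trainer_state.json") as f:  # assumed local path
    state = json.load(f)

for split in ("rundkast", "nb_samtale", "bigbrother"):
    key = f"eval_{split}_wer"
    curve = [(entry["epoch"], entry[key]) for entry in state["log_history"] if key in entry]
    best_epoch, best_wer = min(curve, key=lambda point: point[1])
    print(f"{split}: best WER {best_wer:.4f} at epoch {best_epoch:.0f}")
```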
checkpoint-3344/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ce5bbada60c6d2047edbe4ab0c2c6c8ea2d90d2d867f77eaa978b26a077a2a59
+ size 4984
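training_args.bin is the pickled TrainingArguments object for the run (trainer_state.json above reports num_train_epochs 40 and train_batch_size 48). It can be inspected with torch.load; unpickling should only be done on files from a trusted source, and transformers must be importable:

```python
# Sketch: inspect the pickled TrainingArguments stored with the checkpoint.
import torch

args = torch.load("checkpoint-3344/training_args.bin", weights_only=False)  # assumed local path
print(args.num_train_epochs, args.per_device_train_batch_size, args.learning_rate)
```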
checkpoint-3520/config.json ADDED
@@ -0,0 +1,116 @@
+ {
+ "_name_or_path": "NbAiLab/nb-wav2vec2-300m-bokmaal",
+ "activation_dropout": 0.055,
+ "adapter_attn_dim": null,
+ "adapter_kernel_size": 3,
+ "adapter_stride": 2,
+ "add_adapter": false,
+ "apply_spec_augment": true,
+ "architectures": [
+ "Wav2Vec2ForCTC"
+ ],
+ "attention_dropout": 0.094,
+ "bos_token_id": 1,
+ "classifier_proj_size": 256,
+ "codevector_dim": 768,
+ "contrastive_logits_temperature": 0.1,
+ "conv_bias": true,
+ "conv_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512
+ ],
+ "conv_kernel": [
+ 10,
+ 3,
+ 3,
+ 3,
+ 3,
+ 2,
+ 2
+ ],
+ "conv_stride": [
+ 5,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2
+ ],
+ "ctc_loss_reduction": "mean",
+ "ctc_zero_infinity": true,
+ "diversity_loss_weight": 0.1,
+ "do_stable_layer_norm": true,
+ "eos_token_id": 2,
+ "feat_extract_activation": "gelu",
+ "feat_extract_dropout": 0.0,
+ "feat_extract_norm": "layer",
+ "feat_proj_dropout": 0.04,
+ "feat_quantizer_dropout": 0.0,
+ "final_dropout": 0.0,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.047,
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 4096,
+ "layer_norm_eps": 1e-05,
+ "layerdrop": 0.041,
+ "mask_channel_length": 10,
+ "mask_channel_min_space": 1,
+ "mask_channel_other": 0.0,
+ "mask_channel_prob": 0.0,
+ "mask_channel_selection": "static",
+ "mask_feature_length": 64,
+ "mask_feature_min_masks": 0,
+ "mask_feature_prob": 0.25,
+ "mask_time_length": 10,
+ "mask_time_min_masks": 2,
+ "mask_time_min_space": 1,
+ "mask_time_other": 0.0,
+ "mask_time_prob": 0.082,
+ "mask_time_selection": "static",
+ "model_type": "wav2vec2",
+ "num_adapter_layers": 3,
+ "num_attention_heads": 16,
+ "num_codevector_groups": 2,
+ "num_codevectors_per_group": 320,
+ "num_conv_pos_embedding_groups": 16,
+ "num_conv_pos_embeddings": 128,
+ "num_feat_extract_layers": 7,
+ "num_hidden_layers": 24,
+ "num_negatives": 100,
+ "output_hidden_size": 1024,
+ "pad_token_id": 31,
+ "proj_codevector_dim": 768,
+ "tdnn_dilation": [
+ 1,
+ 2,
+ 3,
+ 1,
+ 1
+ ],
+ "tdnn_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 1500
+ ],
+ "tdnn_kernel": [
+ 5,
+ 3,
+ 3,
+ 1,
+ 1
+ ],
+ "torch_dtype": "float32",
+ "transformers_version": "4.38.1",
+ "use_weighted_layer_sum": false,
+ "vocab_size": 34,
+ "xvector_output_dim": 512
+ }
checkpoint-3520/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d52c2c355d018db6ae0e8afc6e0cb07039048394c7faad18790c861dffb1d642
+ size 1261946880
checkpoint-3520/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a0622dea9bd85a59c78bd69b9ff83d6d5ca78e408b391b85dbde2361feadb58a
+ size 2490438582
checkpoint-3520/preprocessor_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "do_normalize": true,
+ "feature_extractor_type": "Wav2Vec2FeatureExtractor",
+ "feature_size": 1,
+ "padding_side": "right",
+ "padding_value": 0,
+ "processor_class": "Wav2Vec2ProcessorWithLM",
+ "return_attention_mask": true,
+ "sampling_rate": 16000
+ }
checkpoint-3520/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:145e031bb18d5a08396bc9cb0a0eb7b726020561eabcdc7c5591644b4eb6f0e0
+ size 14244
checkpoint-3520/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d7e8e547c971b7e553be1f9a8a5d06782bf22b1e84244b613d58425880a6dfbe
+ size 1064
checkpoint-3520/trainer_state.json ADDED
@@ -0,0 +1,1381 @@
+ {
+ "best_metric": 0.17350260416666666,
+ "best_model_checkpoint": "/cluster/home/torstefl/Master/saved_model/W2V/single/NB/30.05/checkpoint-3344",
+ "epoch": 40.0,
+ "eval_steps": 500,
+ "global_step": 3520,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
11
+ {
12
+ "epoch": 1.0,
13
+ "grad_norm": 3.9170310497283936,
14
+ "learning_rate": 5.8e-06,
15
+ "loss": 0.9591,
16
+ "step": 88
17
+ },
18
+ {
19
+ "epoch": 1.0,
20
+ "eval_rundkast_loss": 0.5365053415298462,
21
+ "eval_rundkast_runtime": 35.6704,
22
+ "eval_rundkast_samples_per_second": 37.706,
23
+ "eval_rundkast_steps_per_second": 1.205,
24
+ "eval_rundkast_wer": 0.23027362392618517,
25
+ "step": 88
26
+ },
27
+ {
28
+ "epoch": 1.0,
29
+ "eval_nb_samtale_loss": 0.67843097448349,
30
+ "eval_nb_samtale_runtime": 39.2213,
31
+ "eval_nb_samtale_samples_per_second": 13.59,
32
+ "eval_nb_samtale_steps_per_second": 0.433,
33
+ "eval_nb_samtale_wer": 0.24666341145833334,
34
+ "step": 88
35
+ },
36
+ {
37
+ "epoch": 1.0,
38
+ "eval_bigbrother_loss": 2.74082612991333,
39
+ "eval_bigbrother_runtime": 41.8079,
40
+ "eval_bigbrother_samples_per_second": 32.769,
41
+ "eval_bigbrother_steps_per_second": 1.029,
42
+ "eval_bigbrother_wer": 0.6191089986304681,
43
+ "step": 88
44
+ },
45
+ {
46
+ "epoch": 2.0,
47
+ "grad_norm": 4.4212822914123535,
48
+ "learning_rate": 1.1666666666666668e-05,
49
+ "loss": 0.8123,
50
+ "step": 176
51
+ },
52
+ {
53
+ "epoch": 2.0,
54
+ "eval_rundkast_loss": 0.4252748191356659,
55
+ "eval_rundkast_runtime": 33.6441,
56
+ "eval_rundkast_samples_per_second": 39.977,
57
+ "eval_rundkast_steps_per_second": 1.278,
58
+ "eval_rundkast_wer": 0.2241489023226217,
59
+ "step": 176
60
+ },
61
+ {
62
+ "epoch": 2.0,
63
+ "eval_nb_samtale_loss": 0.5754267573356628,
64
+ "eval_nb_samtale_runtime": 38.5834,
65
+ "eval_nb_samtale_samples_per_second": 13.814,
66
+ "eval_nb_samtale_steps_per_second": 0.441,
67
+ "eval_nb_samtale_wer": 0.23527018229166666,
68
+ "step": 176
69
+ },
70
+ {
71
+ "epoch": 2.0,
72
+ "eval_bigbrother_loss": 2.240948438644409,
73
+ "eval_bigbrother_runtime": 41.5488,
74
+ "eval_bigbrother_samples_per_second": 32.973,
75
+ "eval_bigbrother_steps_per_second": 1.035,
76
+ "eval_bigbrother_wer": 0.6037219044550068,
77
+ "step": 176
78
+ },
79
+ {
80
+ "epoch": 3.0,
81
+ "grad_norm": 3.1233577728271484,
82
+ "learning_rate": 1.7533333333333334e-05,
83
+ "loss": 0.7517,
84
+ "step": 264
85
+ },
86
+ {
87
+ "epoch": 3.0,
88
+ "eval_rundkast_loss": 0.419950932264328,
89
+ "eval_rundkast_runtime": 33.5339,
90
+ "eval_rundkast_samples_per_second": 40.109,
91
+ "eval_rundkast_steps_per_second": 1.282,
92
+ "eval_rundkast_wer": 0.2205695195672924,
93
+ "step": 264
94
+ },
95
+ {
96
+ "epoch": 3.0,
97
+ "eval_nb_samtale_loss": 0.5531216263771057,
98
+ "eval_nb_samtale_runtime": 38.5893,
99
+ "eval_nb_samtale_samples_per_second": 13.812,
100
+ "eval_nb_samtale_steps_per_second": 0.441,
101
+ "eval_nb_samtale_wer": 0.22932942708333334,
102
+ "step": 264
103
+ },
104
+ {
105
+ "epoch": 3.0,
106
+ "eval_bigbrother_loss": 2.1344377994537354,
107
+ "eval_bigbrother_runtime": 41.2938,
108
+ "eval_bigbrother_samples_per_second": 33.177,
109
+ "eval_bigbrother_steps_per_second": 1.041,
110
+ "eval_bigbrother_wer": 0.5964714412309675,
111
+ "step": 264
112
+ },
113
+ {
114
+ "epoch": 4.0,
115
+ "grad_norm": 3.1834347248077393,
116
+ "learning_rate": 2.3400000000000003e-05,
117
+ "loss": 0.7074,
118
+ "step": 352
119
+ },
120
+ {
121
+ "epoch": 4.0,
122
+ "eval_rundkast_loss": 0.4088253676891327,
123
+ "eval_rundkast_runtime": 33.3755,
124
+ "eval_rundkast_samples_per_second": 40.299,
125
+ "eval_rundkast_steps_per_second": 1.288,
126
+ "eval_rundkast_wer": 0.22009226853324848,
127
+ "step": 352
128
+ },
129
+ {
130
+ "epoch": 4.0,
131
+ "eval_nb_samtale_loss": 0.5255401134490967,
132
+ "eval_nb_samtale_runtime": 38.2361,
133
+ "eval_nb_samtale_samples_per_second": 13.94,
134
+ "eval_nb_samtale_steps_per_second": 0.445,
135
+ "eval_nb_samtale_wer": 0.22696940104166666,
136
+ "step": 352
137
+ },
138
+ {
139
+ "epoch": 4.0,
140
+ "eval_bigbrother_loss": 2.036323070526123,
141
+ "eval_bigbrother_runtime": 41.2821,
142
+ "eval_bigbrother_samples_per_second": 33.186,
143
+ "eval_bigbrother_steps_per_second": 1.042,
144
+ "eval_bigbrother_wer": 0.5905099492467575,
145
+ "step": 352
146
+ },
147
+ {
148
+ "epoch": 5.0,
149
+ "grad_norm": 1.5792173147201538,
150
+ "learning_rate": 2.926666666666667e-05,
151
+ "loss": 0.6874,
152
+ "step": 440
153
+ },
154
+ {
155
+ "epoch": 5.0,
156
+ "eval_rundkast_loss": 0.40116986632347107,
157
+ "eval_rundkast_runtime": 33.3522,
158
+ "eval_rundkast_samples_per_second": 40.327,
159
+ "eval_rundkast_steps_per_second": 1.289,
160
+ "eval_rundkast_wer": 0.21985364301622654,
161
+ "step": 440
162
+ },
163
+ {
164
+ "epoch": 5.0,
165
+ "eval_nb_samtale_loss": 0.5132110714912415,
166
+ "eval_nb_samtale_runtime": 38.393,
167
+ "eval_nb_samtale_samples_per_second": 13.883,
168
+ "eval_nb_samtale_steps_per_second": 0.443,
169
+ "eval_nb_samtale_wer": 0.224365234375,
170
+ "step": 440
171
+ },
172
+ {
173
+ "epoch": 5.0,
174
+ "eval_bigbrother_loss": 1.9978336095809937,
175
+ "eval_bigbrother_runtime": 41.45,
176
+ "eval_bigbrother_samples_per_second": 33.052,
177
+ "eval_bigbrother_steps_per_second": 1.037,
178
+ "eval_bigbrother_wer": 0.5905099492467575,
179
+ "step": 440
180
+ },
181
+ {
182
+ "epoch": 6.0,
183
+ "grad_norm": 2.002620220184326,
184
+ "learning_rate": 3.513333333333334e-05,
185
+ "loss": 0.6548,
186
+ "step": 528
187
+ },
188
+ {
189
+ "epoch": 6.0,
190
+ "eval_rundkast_loss": 0.4032253921031952,
191
+ "eval_rundkast_runtime": 33.4235,
192
+ "eval_rundkast_samples_per_second": 40.241,
193
+ "eval_rundkast_steps_per_second": 1.287,
194
+ "eval_rundkast_wer": 0.217467387846007,
195
+ "step": 528
196
+ },
197
+ {
198
+ "epoch": 6.0,
199
+ "eval_nb_samtale_loss": 0.5047765374183655,
200
+ "eval_nb_samtale_runtime": 38.2135,
201
+ "eval_nb_samtale_samples_per_second": 13.948,
202
+ "eval_nb_samtale_steps_per_second": 0.445,
203
+ "eval_nb_samtale_wer": 0.220947265625,
204
+ "step": 528
205
+ },
206
+ {
207
+ "epoch": 6.0,
208
+ "eval_bigbrother_loss": 1.9670743942260742,
209
+ "eval_bigbrother_runtime": 41.4322,
210
+ "eval_bigbrother_samples_per_second": 33.066,
211
+ "eval_bigbrother_steps_per_second": 1.038,
212
+ "eval_bigbrother_wer": 0.5801176186256344,
213
+ "step": 528
214
+ },
215
+ {
216
+ "epoch": 7.0,
217
+ "grad_norm": 1.4368079900741577,
218
+ "learning_rate": 4.1e-05,
219
+ "loss": 0.6439,
220
+ "step": 616
221
+ },
222
+ {
223
+ "epoch": 7.0,
224
+ "eval_rundkast_loss": 0.39594346284866333,
225
+ "eval_rundkast_runtime": 33.337,
226
+ "eval_rundkast_samples_per_second": 40.346,
227
+ "eval_rundkast_steps_per_second": 1.29,
228
+ "eval_rundkast_wer": 0.22009226853324848,
229
+ "step": 616
230
+ },
231
+ {
232
+ "epoch": 7.0,
233
+ "eval_nb_samtale_loss": 0.48273980617523193,
234
+ "eval_nb_samtale_runtime": 38.0773,
235
+ "eval_nb_samtale_samples_per_second": 13.998,
236
+ "eval_nb_samtale_steps_per_second": 0.446,
237
+ "eval_nb_samtale_wer": 0.21964518229166666,
238
+ "step": 616
239
+ },
240
+ {
241
+ "epoch": 7.0,
242
+ "eval_bigbrother_loss": 1.921434998512268,
243
+ "eval_bigbrother_runtime": 41.5595,
244
+ "eval_bigbrother_samples_per_second": 32.965,
245
+ "eval_bigbrother_steps_per_second": 1.035,
246
+ "eval_bigbrother_wer": 0.5865624748247805,
247
+ "step": 616
248
+ },
249
+ {
250
+ "epoch": 8.0,
251
+ "grad_norm": 1.1675041913986206,
252
+ "learning_rate": 4.686666666666667e-05,
253
+ "loss": 0.6158,
254
+ "step": 704
255
+ },
256
+ {
257
+ "epoch": 8.0,
258
+ "eval_rundkast_loss": 0.40993189811706543,
259
+ "eval_rundkast_runtime": 33.4667,
260
+ "eval_rundkast_samples_per_second": 40.189,
261
+ "eval_rundkast_steps_per_second": 1.285,
262
+ "eval_rundkast_wer": 0.22128539611835826,
263
+ "step": 704
264
+ },
265
+ {
266
+ "epoch": 8.0,
267
+ "eval_nb_samtale_loss": 0.494111031293869,
268
+ "eval_nb_samtale_runtime": 38.0186,
269
+ "eval_nb_samtale_samples_per_second": 14.019,
270
+ "eval_nb_samtale_steps_per_second": 0.447,
271
+ "eval_nb_samtale_wer": 0.21712239583333334,
272
+ "step": 704
273
+ },
274
+ {
275
+ "epoch": 8.0,
276
+ "eval_bigbrother_loss": 1.9780834913253784,
277
+ "eval_bigbrother_runtime": 41.757,
278
+ "eval_bigbrother_samples_per_second": 32.809,
279
+ "eval_bigbrother_steps_per_second": 1.03,
280
+ "eval_bigbrother_wer": 0.5809232256505277,
281
+ "step": 704
282
+ },
283
+ {
284
+ "epoch": 9.0,
285
+ "grad_norm": 2.030735731124878,
286
+ "learning_rate": 5.273333333333333e-05,
287
+ "loss": 0.6041,
288
+ "step": 792
289
+ },
290
+ {
291
+ "epoch": 9.0,
292
+ "eval_rundkast_loss": 0.3950729966163635,
293
+ "eval_rundkast_runtime": 33.5538,
294
+ "eval_rundkast_samples_per_second": 40.085,
295
+ "eval_rundkast_steps_per_second": 1.282,
296
+ "eval_rundkast_wer": 0.21762647152402165,
297
+ "step": 792
298
+ },
299
+ {
300
+ "epoch": 9.0,
301
+ "eval_nb_samtale_loss": 0.45818740129470825,
302
+ "eval_nb_samtale_runtime": 37.9662,
303
+ "eval_nb_samtale_samples_per_second": 14.039,
304
+ "eval_nb_samtale_steps_per_second": 0.448,
305
+ "eval_nb_samtale_wer": 0.21183268229166666,
306
+ "step": 792
307
+ },
308
+ {
309
+ "epoch": 9.0,
310
+ "eval_bigbrother_loss": 1.8719202280044556,
311
+ "eval_bigbrother_runtime": 42.0553,
312
+ "eval_bigbrother_samples_per_second": 32.576,
313
+ "eval_bigbrother_steps_per_second": 1.022,
314
+ "eval_bigbrother_wer": 0.5723032304841699,
315
+ "step": 792
316
+ },
317
+ {
318
+ "epoch": 10.0,
319
+ "grad_norm": 1.995400071144104,
320
+ "learning_rate": 5.86e-05,
321
+ "loss": 0.586,
322
+ "step": 880
323
+ },
324
+ {
325
+ "epoch": 10.0,
326
+ "eval_rundkast_loss": 0.39550164341926575,
327
+ "eval_rundkast_runtime": 33.4182,
328
+ "eval_rundkast_samples_per_second": 40.248,
329
+ "eval_rundkast_steps_per_second": 1.287,
330
+ "eval_rundkast_wer": 0.21762647152402165,
331
+ "step": 880
332
+ },
333
+ {
334
+ "epoch": 10.0,
335
+ "eval_nb_samtale_loss": 0.46760231256484985,
336
+ "eval_nb_samtale_runtime": 38.1072,
337
+ "eval_nb_samtale_samples_per_second": 13.987,
338
+ "eval_nb_samtale_steps_per_second": 0.446,
339
+ "eval_nb_samtale_wer": 0.208251953125,
340
+ "step": 880
341
+ },
342
+ {
343
+ "epoch": 10.0,
344
+ "eval_bigbrother_loss": 1.8893344402313232,
345
+ "eval_bigbrother_runtime": 41.5152,
346
+ "eval_bigbrother_samples_per_second": 33.0,
347
+ "eval_bigbrother_steps_per_second": 1.036,
348
+ "eval_bigbrother_wer": 0.5765729477161041,
349
+ "step": 880
350
+ },
351
+ {
352
+ "epoch": 11.0,
353
+ "grad_norm": 1.3565304279327393,
354
+ "learning_rate": 6.446666666666667e-05,
355
+ "loss": 0.5779,
356
+ "step": 968
357
+ },
358
+ {
359
+ "epoch": 11.0,
360
+ "eval_rundkast_loss": 0.39460697770118713,
361
+ "eval_rundkast_runtime": 33.627,
362
+ "eval_rundkast_samples_per_second": 39.998,
363
+ "eval_rundkast_steps_per_second": 1.279,
364
+ "eval_rundkast_wer": 0.21810372255806554,
365
+ "step": 968
366
+ },
367
+ {
368
+ "epoch": 11.0,
369
+ "eval_nb_samtale_loss": 0.45650386810302734,
370
+ "eval_nb_samtale_runtime": 37.9631,
371
+ "eval_nb_samtale_samples_per_second": 14.04,
372
+ "eval_nb_samtale_steps_per_second": 0.448,
373
+ "eval_nb_samtale_wer": 0.20613606770833334,
374
+ "step": 968
375
+ },
376
+ {
377
+ "epoch": 11.0,
378
+ "eval_bigbrother_loss": 1.9142467975616455,
379
+ "eval_bigbrother_runtime": 41.4099,
380
+ "eval_bigbrother_samples_per_second": 33.084,
381
+ "eval_bigbrother_steps_per_second": 1.038,
382
+ "eval_bigbrother_wer": 0.5729477161040845,
383
+ "step": 968
384
+ },
385
+ {
386
+ "epoch": 12.0,
387
+ "grad_norm": 1.7810662984848022,
388
+ "learning_rate": 7.033333333333334e-05,
389
+ "loss": 0.5375,
390
+ "step": 1056
391
+ },
392
+ {
393
+ "epoch": 12.0,
394
+ "eval_rundkast_loss": 0.40589994192123413,
395
+ "eval_rundkast_runtime": 33.6087,
396
+ "eval_rundkast_samples_per_second": 40.019,
397
+ "eval_rundkast_steps_per_second": 1.279,
398
+ "eval_rundkast_wer": 0.2184218899140948,
399
+ "step": 1056
400
+ },
401
+ {
402
+ "epoch": 12.0,
403
+ "eval_nb_samtale_loss": 0.46365875005722046,
404
+ "eval_nb_samtale_runtime": 38.2581,
405
+ "eval_nb_samtale_samples_per_second": 13.932,
406
+ "eval_nb_samtale_steps_per_second": 0.444,
407
+ "eval_nb_samtale_wer": 0.201171875,
408
+ "step": 1056
409
+ },
410
+ {
411
+ "epoch": 12.0,
412
+ "eval_bigbrother_loss": 1.9760468006134033,
413
+ "eval_bigbrother_runtime": 42.201,
414
+ "eval_bigbrother_samples_per_second": 32.464,
415
+ "eval_bigbrother_steps_per_second": 1.019,
416
+ "eval_bigbrother_wer": 0.5706920164343833,
417
+ "step": 1056
418
+ },
419
+ {
420
+ "epoch": 13.0,
421
+ "grad_norm": 1.9388726949691772,
422
+ "learning_rate": 7.620000000000001e-05,
423
+ "loss": 0.5238,
424
+ "step": 1144
425
+ },
426
+ {
427
+ "epoch": 13.0,
428
+ "eval_rundkast_loss": 0.4222950339317322,
429
+ "eval_rundkast_runtime": 33.5989,
430
+ "eval_rundkast_samples_per_second": 40.031,
431
+ "eval_rundkast_steps_per_second": 1.28,
432
+ "eval_rundkast_wer": 0.21945593382118994,
433
+ "step": 1144
434
+ },
435
+ {
436
+ "epoch": 13.0,
437
+ "eval_nb_samtale_loss": 0.47446364164352417,
438
+ "eval_nb_samtale_runtime": 37.9325,
439
+ "eval_nb_samtale_samples_per_second": 14.051,
440
+ "eval_nb_samtale_steps_per_second": 0.448,
441
+ "eval_nb_samtale_wer": 0.20345052083333334,
442
+ "step": 1144
443
+ },
444
+ {
445
+ "epoch": 13.0,
446
+ "eval_bigbrother_loss": 1.956613540649414,
447
+ "eval_bigbrother_runtime": 41.6064,
448
+ "eval_bigbrother_samples_per_second": 32.928,
449
+ "eval_bigbrother_steps_per_second": 1.033,
450
+ "eval_bigbrother_wer": 0.564408281640216,
451
+ "step": 1144
452
+ },
453
+ {
454
+ "epoch": 14.0,
455
+ "grad_norm": 1.7794352769851685,
456
+ "learning_rate": 8.206666666666666e-05,
457
+ "loss": 0.5002,
458
+ "step": 1232
459
+ },
460
+ {
461
+ "epoch": 14.0,
462
+ "eval_rundkast_loss": 0.40369686484336853,
463
+ "eval_rundkast_runtime": 33.4225,
464
+ "eval_rundkast_samples_per_second": 40.242,
465
+ "eval_rundkast_steps_per_second": 1.287,
466
+ "eval_rundkast_wer": 0.2170696786509704,
467
+ "step": 1232
468
+ },
469
+ {
470
+ "epoch": 14.0,
471
+ "eval_nb_samtale_loss": 0.4533463418483734,
472
+ "eval_nb_samtale_runtime": 38.1602,
473
+ "eval_nb_samtale_samples_per_second": 13.967,
474
+ "eval_nb_samtale_steps_per_second": 0.445,
475
+ "eval_nb_samtale_wer": 0.19881184895833334,
476
+ "step": 1232
477
+ },
478
+ {
479
+ "epoch": 14.0,
480
+ "eval_bigbrother_loss": 1.9202650785446167,
481
+ "eval_bigbrother_runtime": 41.8647,
482
+ "eval_bigbrother_samples_per_second": 32.724,
483
+ "eval_bigbrother_steps_per_second": 1.027,
484
+ "eval_bigbrother_wer": 0.5650527672601305,
485
+ "step": 1232
486
+ },
487
+ {
488
+ "epoch": 15.0,
489
+ "grad_norm": 2.007899284362793,
490
+ "learning_rate": 8.793333333333333e-05,
491
+ "loss": 0.4939,
492
+ "step": 1320
493
+ },
494
+ {
495
+ "epoch": 15.0,
496
+ "eval_rundkast_loss": 0.39848214387893677,
497
+ "eval_rundkast_runtime": 33.4922,
498
+ "eval_rundkast_samples_per_second": 40.159,
499
+ "eval_rundkast_steps_per_second": 1.284,
500
+ "eval_rundkast_wer": 0.21794463888005092,
501
+ "step": 1320
502
+ },
503
+ {
504
+ "epoch": 15.0,
505
+ "eval_nb_samtale_loss": 0.43362265825271606,
506
+ "eval_nb_samtale_runtime": 37.9431,
507
+ "eval_nb_samtale_samples_per_second": 14.047,
508
+ "eval_nb_samtale_steps_per_second": 0.448,
509
+ "eval_nb_samtale_wer": 0.20182291666666666,
510
+ "step": 1320
511
+ },
512
+ {
513
+ "epoch": 15.0,
514
+ "eval_bigbrother_loss": 1.824406385421753,
515
+ "eval_bigbrother_runtime": 41.6014,
516
+ "eval_bigbrother_samples_per_second": 32.932,
517
+ "eval_bigbrother_steps_per_second": 1.034,
518
+ "eval_bigbrother_wer": 0.5605413679207283,
519
+ "step": 1320
520
+ },
521
+ {
522
+ "epoch": 16.0,
523
+ "grad_norm": 1.890428066253662,
524
+ "learning_rate": 9.38e-05,
525
+ "loss": 0.493,
526
+ "step": 1408
527
+ },
528
+ {
529
+ "epoch": 16.0,
530
+ "eval_rundkast_loss": 0.42038509249687195,
531
+ "eval_rundkast_runtime": 33.8742,
532
+ "eval_rundkast_samples_per_second": 39.706,
533
+ "eval_rundkast_steps_per_second": 1.269,
534
+ "eval_rundkast_wer": 0.22009226853324848,
535
+ "step": 1408
536
+ },
537
+ {
538
+ "epoch": 16.0,
539
+ "eval_nb_samtale_loss": 0.4756682515144348,
540
+ "eval_nb_samtale_runtime": 38.2223,
541
+ "eval_nb_samtale_samples_per_second": 13.945,
542
+ "eval_nb_samtale_steps_per_second": 0.445,
543
+ "eval_nb_samtale_wer": 0.20003255208333334,
544
+ "step": 1408
545
+ },
546
+ {
547
+ "epoch": 16.0,
548
+ "eval_bigbrother_loss": 2.017906665802002,
549
+ "eval_bigbrother_runtime": 41.7123,
550
+ "eval_bigbrother_samples_per_second": 32.844,
551
+ "eval_bigbrother_steps_per_second": 1.031,
552
+ "eval_bigbrother_wer": 0.5624748247804721,
553
+ "step": 1408
554
+ },
555
+ {
556
+ "epoch": 17.0,
557
+ "grad_norm": 1.8507376909255981,
558
+ "learning_rate": 9.966666666666667e-05,
559
+ "loss": 0.4734,
560
+ "step": 1496
561
+ },
562
+ {
563
+ "epoch": 17.0,
564
+ "eval_rundkast_loss": 0.4344000518321991,
565
+ "eval_rundkast_runtime": 34.0756,
566
+ "eval_rundkast_samples_per_second": 39.471,
567
+ "eval_rundkast_steps_per_second": 1.262,
568
+ "eval_rundkast_wer": 0.21699013681196308,
569
+ "step": 1496
570
+ },
571
+ {
572
+ "epoch": 17.0,
573
+ "eval_nb_samtale_loss": 0.4668411314487457,
574
+ "eval_nb_samtale_runtime": 37.9422,
575
+ "eval_nb_samtale_samples_per_second": 14.048,
576
+ "eval_nb_samtale_steps_per_second": 0.448,
577
+ "eval_nb_samtale_wer": 0.19694010416666666,
578
+ "step": 1496
579
+ },
580
+ {
581
+ "epoch": 17.0,
582
+ "eval_bigbrother_loss": 1.9858715534210205,
583
+ "eval_bigbrother_runtime": 41.6236,
584
+ "eval_bigbrother_samples_per_second": 32.914,
585
+ "eval_bigbrother_steps_per_second": 1.033,
586
+ "eval_bigbrother_wer": 0.5611052928381536,
587
+ "step": 1496
588
+ },
589
+ {
590
+ "epoch": 18.0,
591
+ "grad_norm": 1.6648415327072144,
592
+ "learning_rate": 9.589108910891089e-05,
593
+ "loss": 0.4444,
594
+ "step": 1584
595
+ },
596
+ {
597
+ "epoch": 18.0,
598
+ "eval_rundkast_loss": 0.459636926651001,
599
+ "eval_rundkast_runtime": 33.6234,
600
+ "eval_rundkast_samples_per_second": 40.002,
601
+ "eval_rundkast_steps_per_second": 1.279,
602
+ "eval_rundkast_wer": 0.2184218899140948,
603
+ "step": 1584
604
+ },
605
+ {
606
+ "epoch": 18.0,
607
+ "eval_nb_samtale_loss": 0.4778790771961212,
608
+ "eval_nb_samtale_runtime": 37.93,
609
+ "eval_nb_samtale_samples_per_second": 14.052,
610
+ "eval_nb_samtale_steps_per_second": 0.448,
611
+ "eval_nb_samtale_wer": 0.19482421875,
612
+ "step": 1584
613
+ },
614
+ {
615
+ "epoch": 18.0,
616
+ "eval_bigbrother_loss": 2.19545316696167,
617
+ "eval_bigbrother_runtime": 41.4756,
618
+ "eval_bigbrother_samples_per_second": 33.031,
619
+ "eval_bigbrother_steps_per_second": 1.037,
620
+ "eval_bigbrother_wer": 0.5647305244501732,
621
+ "step": 1584
622
+ },
623
+ {
624
+ "epoch": 19.0,
625
+ "grad_norm": 2.001049041748047,
626
+ "learning_rate": 9.153465346534654e-05,
627
+ "loss": 0.4535,
628
+ "step": 1672
629
+ },
630
+ {
631
+ "epoch": 19.0,
632
+ "eval_rundkast_loss": 0.4358086585998535,
633
+ "eval_rundkast_runtime": 33.8712,
634
+ "eval_rundkast_samples_per_second": 39.709,
635
+ "eval_rundkast_steps_per_second": 1.27,
636
+ "eval_rundkast_wer": 0.21317212853961184,
637
+ "step": 1672
638
+ },
639
+ {
640
+ "epoch": 19.0,
641
+ "eval_nb_samtale_loss": 0.4770139157772064,
642
+ "eval_nb_samtale_runtime": 38.0225,
643
+ "eval_nb_samtale_samples_per_second": 14.018,
644
+ "eval_nb_samtale_steps_per_second": 0.447,
645
+ "eval_nb_samtale_wer": 0.19514973958333334,
646
+ "step": 1672
647
+ },
648
+ {
649
+ "epoch": 19.0,
650
+ "eval_bigbrother_loss": 2.057053804397583,
651
+ "eval_bigbrother_runtime": 41.9595,
652
+ "eval_bigbrother_samples_per_second": 32.651,
653
+ "eval_bigbrother_steps_per_second": 1.025,
654
+ "eval_bigbrother_wer": 0.5624748247804721,
655
+ "step": 1672
656
+ },
657
+ {
658
+ "epoch": 20.0,
659
+ "grad_norm": 1.6764415502548218,
660
+ "learning_rate": 8.717821782178219e-05,
661
+ "loss": 0.4231,
662
+ "step": 1760
663
+ },
664
+ {
665
+ "epoch": 20.0,
666
+ "eval_rundkast_loss": 0.41307970881462097,
667
+ "eval_rundkast_runtime": 34.1234,
668
+ "eval_rundkast_samples_per_second": 39.416,
669
+ "eval_rundkast_steps_per_second": 1.26,
670
+ "eval_rundkast_wer": 0.21603563474387527,
671
+ "step": 1760
672
+ },
673
+ {
674
+ "epoch": 20.0,
675
+ "eval_nb_samtale_loss": 0.43975648283958435,
676
+ "eval_nb_samtale_runtime": 37.7496,
677
+ "eval_nb_samtale_samples_per_second": 14.119,
678
+ "eval_nb_samtale_steps_per_second": 0.45,
679
+ "eval_nb_samtale_wer": 0.18717447916666666,
680
+ "step": 1760
681
+ },
682
+ {
683
+ "epoch": 20.0,
684
+ "eval_bigbrother_loss": 1.9533370733261108,
685
+ "eval_bigbrother_runtime": 41.7155,
686
+ "eval_bigbrother_samples_per_second": 32.842,
687
+ "eval_bigbrother_steps_per_second": 1.031,
688
+ "eval_bigbrother_wer": 0.5524852976717957,
689
+ "step": 1760
690
+ },
691
+ {
692
+ "epoch": 21.0,
693
+ "grad_norm": 1.599152684211731,
694
+ "learning_rate": 8.282178217821782e-05,
695
+ "loss": 0.4086,
696
+ "step": 1848
697
+ },
698
+ {
699
+ "epoch": 21.0,
700
+ "eval_rundkast_loss": 0.4404522776603699,
701
+ "eval_rundkast_runtime": 33.7971,
702
+ "eval_rundkast_samples_per_second": 39.796,
703
+ "eval_rundkast_steps_per_second": 1.272,
704
+ "eval_rundkast_wer": 0.2138880050906777,
705
+ "step": 1848
706
+ },
707
+ {
708
+ "epoch": 21.0,
709
+ "eval_nb_samtale_loss": 0.4675068259239197,
710
+ "eval_nb_samtale_runtime": 37.8694,
711
+ "eval_nb_samtale_samples_per_second": 14.075,
712
+ "eval_nb_samtale_steps_per_second": 0.449,
713
+ "eval_nb_samtale_wer": 0.19132486979166666,
714
+ "step": 1848
715
+ },
716
+ {
717
+ "epoch": 21.0,
718
+ "eval_bigbrother_loss": 2.153977632522583,
719
+ "eval_bigbrother_runtime": 41.6263,
720
+ "eval_bigbrother_samples_per_second": 32.912,
721
+ "eval_bigbrother_steps_per_second": 1.033,
722
+ "eval_bigbrother_wer": 0.5508740836220092,
723
+ "step": 1848
724
+ },
725
+ {
726
+ "epoch": 22.0,
727
+ "grad_norm": 2.0951850414276123,
728
+ "learning_rate": 7.846534653465347e-05,
729
+ "loss": 0.3963,
730
+ "step": 1936
731
+ },
732
+ {
733
+ "epoch": 22.0,
734
+ "eval_rundkast_loss": 0.455175518989563,
735
+ "eval_rundkast_runtime": 33.5764,
736
+ "eval_rundkast_samples_per_second": 40.058,
737
+ "eval_rundkast_steps_per_second": 1.281,
738
+ "eval_rundkast_wer": 0.21643334393891187,
739
+ "step": 1936
740
+ },
741
+ {
742
+ "epoch": 22.0,
743
+ "eval_nb_samtale_loss": 0.4630807340145111,
744
+ "eval_nb_samtale_runtime": 37.7668,
745
+ "eval_nb_samtale_samples_per_second": 14.113,
746
+ "eval_nb_samtale_steps_per_second": 0.45,
747
+ "eval_nb_samtale_wer": 0.18977864583333334,
748
+ "step": 1936
749
+ },
750
+ {
751
+ "epoch": 22.0,
752
+ "eval_bigbrother_loss": 2.1877636909484863,
753
+ "eval_bigbrother_runtime": 41.6656,
754
+ "eval_bigbrother_samples_per_second": 32.881,
755
+ "eval_bigbrother_steps_per_second": 1.032,
756
+ "eval_bigbrother_wer": 0.5527269797792637,
757
+ "step": 1936
758
+ },
759
+ {
760
+ "epoch": 23.0,
761
+ "grad_norm": 1.5935795307159424,
762
+ "learning_rate": 7.410891089108911e-05,
763
+ "loss": 0.3858,
764
+ "step": 2024
765
+ },
766
+ {
767
+ "epoch": 23.0,
768
+ "eval_rundkast_loss": 0.46708443760871887,
769
+ "eval_rundkast_runtime": 33.6671,
770
+ "eval_rundkast_samples_per_second": 39.95,
771
+ "eval_rundkast_steps_per_second": 1.277,
772
+ "eval_rundkast_wer": 0.2152402163538021,
773
+ "step": 2024
774
+ },
775
+ {
776
+ "epoch": 23.0,
777
+ "eval_nb_samtale_loss": 0.4683707356452942,
778
+ "eval_nb_samtale_runtime": 37.9855,
779
+ "eval_nb_samtale_samples_per_second": 14.032,
780
+ "eval_nb_samtale_steps_per_second": 0.448,
781
+ "eval_nb_samtale_wer": 0.185791015625,
782
+ "step": 2024
783
+ },
784
+ {
785
+ "epoch": 23.0,
786
+ "eval_bigbrother_loss": 2.148423910140991,
787
+ "eval_bigbrother_runtime": 41.6837,
788
+ "eval_bigbrother_samples_per_second": 32.867,
789
+ "eval_bigbrother_steps_per_second": 1.032,
790
+ "eval_bigbrother_wer": 0.5511157657294772,
791
+ "step": 2024
792
+ },
793
+ {
794
+ "epoch": 24.0,
795
+ "grad_norm": 2.292874813079834,
796
+ "learning_rate": 6.975247524752476e-05,
797
+ "loss": 0.3763,
798
+ "step": 2112
799
+ },
800
+ {
801
+ "epoch": 24.0,
802
+ "eval_rundkast_loss": 0.47619959712028503,
803
+ "eval_rundkast_runtime": 33.4679,
804
+ "eval_rundkast_samples_per_second": 40.188,
805
+ "eval_rundkast_steps_per_second": 1.285,
806
+ "eval_rundkast_wer": 0.21810372255806554,
807
+ "step": 2112
808
+ },
809
+ {
810
+ "epoch": 24.0,
811
+ "eval_nb_samtale_loss": 0.47193431854248047,
812
+ "eval_nb_samtale_runtime": 37.7044,
813
+ "eval_nb_samtale_samples_per_second": 14.136,
814
+ "eval_nb_samtale_steps_per_second": 0.451,
815
+ "eval_nb_samtale_wer": 0.18709309895833334,
816
+ "step": 2112
817
+ },
818
+ {
819
+ "epoch": 24.0,
820
+ "eval_bigbrother_loss": 2.1468334197998047,
821
+ "eval_bigbrother_runtime": 41.3571,
822
+ "eval_bigbrother_samples_per_second": 33.126,
823
+ "eval_bigbrother_steps_per_second": 1.04,
824
+ "eval_bigbrother_wer": 0.559735760895835,
825
+ "step": 2112
826
+ },
827
+ {
828
+ "epoch": 25.0,
829
+ "grad_norm": 2.466032028198242,
830
+ "learning_rate": 6.53960396039604e-05,
831
+ "loss": 0.3741,
832
+ "step": 2200
833
+ },
834
+ {
835
+ "epoch": 25.0,
836
+ "eval_rundkast_loss": 0.4504246711730957,
837
+ "eval_rundkast_runtime": 33.5467,
838
+ "eval_rundkast_samples_per_second": 40.093,
839
+ "eval_rundkast_steps_per_second": 1.282,
840
+ "eval_rundkast_wer": 0.2134107540566338,
841
+ "step": 2200
842
+ },
843
+ {
844
+ "epoch": 25.0,
845
+ "eval_nb_samtale_loss": 0.4489670693874359,
846
+ "eval_nb_samtale_runtime": 37.6649,
847
+ "eval_nb_samtale_samples_per_second": 14.151,
848
+ "eval_nb_samtale_steps_per_second": 0.451,
849
+ "eval_nb_samtale_wer": 0.185546875,
850
+ "step": 2200
851
+ },
852
+ {
853
+ "epoch": 25.0,
854
+ "eval_bigbrother_loss": 2.1423308849334717,
855
+ "eval_bigbrother_runtime": 41.5117,
856
+ "eval_bigbrother_samples_per_second": 33.003,
857
+ "eval_bigbrother_steps_per_second": 1.036,
858
+ "eval_bigbrother_wer": 0.553290904696689,
859
+ "step": 2200
860
+ },
861
+ {
862
+ "epoch": 26.0,
863
+ "grad_norm": 1.5329217910766602,
864
+ "learning_rate": 6.103960396039604e-05,
865
+ "loss": 0.3723,
866
+ "step": 2288
867
+ },
868
+ {
869
+ "epoch": 26.0,
870
+ "eval_rundkast_loss": 0.4836730659008026,
871
+ "eval_rundkast_runtime": 33.6087,
872
+ "eval_rundkast_samples_per_second": 40.019,
873
+ "eval_rundkast_steps_per_second": 1.279,
874
+ "eval_rundkast_wer": 0.2152402163538021,
875
+ "step": 2288
876
+ },
877
+ {
878
+ "epoch": 26.0,
879
+ "eval_nb_samtale_loss": 0.4578171968460083,
880
+ "eval_nb_samtale_runtime": 37.9306,
881
+ "eval_nb_samtale_samples_per_second": 14.052,
882
+ "eval_nb_samtale_steps_per_second": 0.448,
883
+ "eval_nb_samtale_wer": 0.18424479166666666,
884
+ "step": 2288
885
+ },
886
+ {
887
+ "epoch": 26.0,
888
+ "eval_bigbrother_loss": 2.1947550773620605,
889
+ "eval_bigbrother_runtime": 41.8164,
890
+ "eval_bigbrother_samples_per_second": 32.762,
891
+ "eval_bigbrother_steps_per_second": 1.028,
892
+ "eval_bigbrother_wer": 0.549101748167244,
893
+ "step": 2288
894
+ },
895
+ {
896
+ "epoch": 27.0,
897
+ "grad_norm": 1.3041456937789917,
898
+ "learning_rate": 5.668316831683168e-05,
899
+ "loss": 0.3684,
900
+ "step": 2376
901
+ },
902
+ {
903
+ "epoch": 27.0,
904
+ "eval_rundkast_loss": 0.4755971133708954,
905
+ "eval_rundkast_runtime": 33.7598,
906
+ "eval_rundkast_samples_per_second": 39.84,
907
+ "eval_rundkast_steps_per_second": 1.274,
908
+ "eval_rundkast_wer": 0.21197900095450206,
909
+ "step": 2376
910
+ },
911
+ {
912
+ "epoch": 27.0,
913
+ "eval_nb_samtale_loss": 0.4704474210739136,
914
+ "eval_nb_samtale_runtime": 37.5334,
915
+ "eval_nb_samtale_samples_per_second": 14.201,
916
+ "eval_nb_samtale_steps_per_second": 0.453,
917
+ "eval_nb_samtale_wer": 0.180419921875,
918
+ "step": 2376
919
+ },
920
+ {
921
+ "epoch": 27.0,
922
+ "eval_bigbrother_loss": 2.2346787452697754,
923
+ "eval_bigbrother_runtime": 41.523,
924
+ "eval_bigbrother_samples_per_second": 32.994,
925
+ "eval_bigbrother_steps_per_second": 1.036,
926
+ "eval_bigbrother_wer": 0.549101748167244,
927
+ "step": 2376
928
+ },
929
+ {
930
+ "epoch": 28.0,
931
+ "grad_norm": 1.4035800695419312,
932
+ "learning_rate": 5.232673267326733e-05,
933
+ "loss": 0.3536,
934
+ "step": 2464
935
+ },
936
+ {
937
+ "epoch": 28.0,
938
+ "eval_rundkast_loss": 0.4591149687767029,
939
+ "eval_rundkast_runtime": 33.8672,
940
+ "eval_rundkast_samples_per_second": 39.714,
941
+ "eval_rundkast_steps_per_second": 1.27,
942
+ "eval_rundkast_wer": 0.2085587018771874,
943
+ "step": 2464
944
+ },
945
+ {
946
+ "epoch": 28.0,
947
+ "eval_nb_samtale_loss": 0.4490753710269928,
948
+ "eval_nb_samtale_runtime": 37.5138,
949
+ "eval_nb_samtale_samples_per_second": 14.208,
950
+ "eval_nb_samtale_steps_per_second": 0.453,
951
+ "eval_nb_samtale_wer": 0.18074544270833334,
952
+ "step": 2464
953
+ },
954
+ {
955
+ "epoch": 28.0,
956
+ "eval_bigbrother_loss": 2.1479501724243164,
957
+ "eval_bigbrother_runtime": 41.5368,
958
+ "eval_bigbrother_samples_per_second": 32.983,
959
+ "eval_bigbrother_steps_per_second": 1.035,
960
+ "eval_bigbrother_wer": 0.5502295980020946,
961
+ "step": 2464
962
+ },
963
+ {
964
+ "epoch": 29.0,
965
+ "grad_norm": 2.3787569999694824,
966
+ "learning_rate": 4.797029702970297e-05,
967
+ "loss": 0.3422,
968
+ "step": 2552
969
+ },
970
+ {
971
+ "epoch": 29.0,
972
+ "eval_rundkast_loss": 0.45249199867248535,
973
+ "eval_rundkast_runtime": 33.8412,
974
+ "eval_rundkast_samples_per_second": 39.745,
975
+ "eval_rundkast_steps_per_second": 1.271,
976
+ "eval_rundkast_wer": 0.2116608335984728,
977
+ "step": 2552
978
+ },
979
+ {
980
+ "epoch": 29.0,
981
+ "eval_nb_samtale_loss": 0.42878830432891846,
982
+ "eval_nb_samtale_runtime": 37.8412,
983
+ "eval_nb_samtale_samples_per_second": 14.085,
984
+ "eval_nb_samtale_steps_per_second": 0.449,
985
+ "eval_nb_samtale_wer": 0.179931640625,
986
+ "step": 2552
987
+ },
988
+ {
989
+ "epoch": 29.0,
990
+ "eval_bigbrother_loss": 2.074361562728882,
991
+ "eval_bigbrother_runtime": 41.584,
992
+ "eval_bigbrother_samples_per_second": 32.945,
993
+ "eval_bigbrother_steps_per_second": 1.034,
994
+ "eval_bigbrother_wer": 0.5470877306050109,
995
+ "step": 2552
996
+ },
997
+ {
998
+ "epoch": 30.0,
999
+ "grad_norm": 1.0397918224334717,
1000
+ "learning_rate": 4.3613861386138617e-05,
1001
+ "loss": 0.3271,
1002
+ "step": 2640
1003
+ },
1004
+ {
1005
+ "epoch": 30.0,
1006
+ "eval_rundkast_loss": 0.4873400926589966,
1007
+ "eval_rundkast_runtime": 33.7561,
1008
+ "eval_rundkast_samples_per_second": 39.845,
1009
+ "eval_rundkast_steps_per_second": 1.274,
1010
+ "eval_rundkast_wer": 0.21174037543748012,
1011
+ "step": 2640
1012
+ },
1013
+ {
1014
+ "epoch": 30.0,
1015
+ "eval_nb_samtale_loss": 0.4706071615219116,
1016
+ "eval_nb_samtale_runtime": 37.8498,
1017
+ "eval_nb_samtale_samples_per_second": 14.082,
1018
+ "eval_nb_samtale_steps_per_second": 0.449,
1019
+ "eval_nb_samtale_wer": 0.17960611979166666,
1020
+ "step": 2640
1021
+ },
1022
+ {
1023
+ "epoch": 30.0,
1024
+ "eval_bigbrother_loss": 2.284547805786133,
1025
+ "eval_bigbrother_runtime": 43.2872,
1026
+ "eval_bigbrother_samples_per_second": 31.649,
1027
+ "eval_bigbrother_steps_per_second": 0.993,
1028
+ "eval_bigbrother_wer": 0.5479738983323934,
1029
+ "step": 2640
1030
+ },
1031
+ {
1032
+ "epoch": 31.0,
1033
+ "grad_norm": 1.832560420036316,
1034
+ "learning_rate": 3.925742574257426e-05,
1035
+ "loss": 0.3351,
1036
+ "step": 2728
1037
+ },
1038
+ {
1039
+ "epoch": 31.0,
1040
+ "eval_rundkast_loss": 0.4675346612930298,
1041
+ "eval_rundkast_runtime": 33.6595,
1042
+ "eval_rundkast_samples_per_second": 39.959,
1043
+ "eval_rundkast_steps_per_second": 1.277,
1044
+ "eval_rundkast_wer": 0.21094495704740693,
1045
+ "step": 2728
1046
+ },
1047
+ {
1048
+ "epoch": 31.0,
1049
+ "eval_nb_samtale_loss": 0.44606560468673706,
1050
+ "eval_nb_samtale_runtime": 37.8399,
1051
+ "eval_nb_samtale_samples_per_second": 14.086,
1052
+ "eval_nb_samtale_steps_per_second": 0.449,
1053
+ "eval_nb_samtale_wer": 0.1767578125,
1054
+ "step": 2728
1055
+ },
1056
+ {
1057
+ "epoch": 31.0,
1058
+ "eval_bigbrother_loss": 2.2436790466308594,
1059
+ "eval_bigbrother_runtime": 41.8909,
1060
+ "eval_bigbrother_samples_per_second": 32.704,
1061
+ "eval_bigbrother_steps_per_second": 1.026,
1062
+ "eval_bigbrother_wer": 0.5497462337871586,
1063
+ "step": 2728
1064
+ },
1065
+ {
1066
+ "epoch": 32.0,
1067
+ "grad_norm": 2.598604917526245,
1068
+ "learning_rate": 3.49009900990099e-05,
1069
+ "loss": 0.3266,
1070
+ "step": 2816
1071
+ },
1072
+ {
1073
+ "epoch": 32.0,
1074
+ "eval_rundkast_loss": 0.47241419553756714,
1075
+ "eval_rundkast_runtime": 33.6802,
1076
+ "eval_rundkast_samples_per_second": 39.934,
1077
+ "eval_rundkast_steps_per_second": 1.277,
1078
+ "eval_rundkast_wer": 0.2099109131403118,
1079
+ "step": 2816
1080
+ },
1081
+ {
1082
+ "epoch": 32.0,
1083
+ "eval_nb_samtale_loss": 0.46288925409317017,
1084
+ "eval_nb_samtale_runtime": 37.3999,
1085
+ "eval_nb_samtale_samples_per_second": 14.251,
1086
+ "eval_nb_samtale_steps_per_second": 0.455,
1087
+ "eval_nb_samtale_wer": 0.17952473958333334,
1088
+ "step": 2816
1089
+ },
1090
+ {
1091
+ "epoch": 32.0,
1092
+ "eval_bigbrother_loss": 2.2838997840881348,
1093
+ "eval_bigbrother_runtime": 41.6372,
1094
+ "eval_bigbrother_samples_per_second": 32.903,
1095
+ "eval_bigbrother_steps_per_second": 1.033,
1096
+ "eval_bigbrother_wer": 0.5497462337871586,
1097
+ "step": 2816
1098
+ },
1099
+ {
1100
+ "epoch": 33.0,
1101
+ "grad_norm": 2.8319473266601562,
1102
+ "learning_rate": 3.054455445544554e-05,
1103
+ "loss": 0.3164,
1104
+ "step": 2904
1105
+ },
1106
+ {
1107
+ "epoch": 33.0,
1108
+ "eval_rundkast_loss": 0.49051862955093384,
1109
+ "eval_rundkast_runtime": 33.5657,
1110
+ "eval_rundkast_samples_per_second": 40.071,
1111
+ "eval_rundkast_steps_per_second": 1.281,
1112
+ "eval_rundkast_wer": 0.21102449888641425,
1113
+ "step": 2904
1114
+ },
1115
+ {
1116
+ "epoch": 33.0,
1117
+ "eval_nb_samtale_loss": 0.47550272941589355,
1118
+ "eval_nb_samtale_runtime": 37.719,
1119
+ "eval_nb_samtale_samples_per_second": 14.131,
1120
+ "eval_nb_samtale_steps_per_second": 0.451,
1121
+ "eval_nb_samtale_wer": 0.17708333333333334,
1122
+ "step": 2904
1123
+ },
1124
+ {
1125
+ "epoch": 33.0,
1126
+ "eval_bigbrother_loss": 2.360140085220337,
1127
+ "eval_bigbrother_runtime": 41.7548,
1128
+ "eval_bigbrother_samples_per_second": 32.811,
1129
+ "eval_bigbrother_steps_per_second": 1.03,
1130
+ "eval_bigbrother_wer": 0.549343430274712,
1131
+ "step": 2904
1132
+ },
1133
+ {
1134
+ "epoch": 34.0,
1135
+ "grad_norm": 1.165239930152893,
1136
+ "learning_rate": 2.6188118811881192e-05,
1137
+ "loss": 0.325,
1138
+ "step": 2992
1139
+ },
1140
+ {
1141
+ "epoch": 34.0,
1142
+ "eval_rundkast_loss": 0.4772399365901947,
1143
+ "eval_rundkast_runtime": 34.1144,
1144
+ "eval_rundkast_samples_per_second": 39.426,
1145
+ "eval_rundkast_steps_per_second": 1.26,
1146
+ "eval_rundkast_wer": 0.21142220808145085,
1147
+ "step": 2992
1148
+ },
1149
+ {
1150
+ "epoch": 34.0,
1151
+ "eval_nb_samtale_loss": 0.45948928594589233,
1152
+ "eval_nb_samtale_runtime": 37.8345,
1153
+ "eval_nb_samtale_samples_per_second": 14.088,
1154
+ "eval_nb_samtale_steps_per_second": 0.449,
1155
+ "eval_nb_samtale_wer": 0.17692057291666666,
1156
+ "step": 2992
1157
+ },
1158
+ {
1159
+ "epoch": 34.0,
1160
+ "eval_bigbrother_loss": 2.2873830795288086,
1161
+ "eval_bigbrother_runtime": 41.8269,
1162
+ "eval_bigbrother_samples_per_second": 32.754,
1163
+ "eval_bigbrother_steps_per_second": 1.028,
1164
+ "eval_bigbrother_wer": 0.5465238056875856,
1165
+ "step": 2992
1166
+ },
1167
+ {
1168
+ "epoch": 35.0,
1169
+ "grad_norm": 1.3796650171279907,
1170
+ "learning_rate": 2.1831683168316834e-05,
1171
+ "loss": 0.3195,
1172
+ "step": 3080
1173
+ },
1174
+ {
1175
+ "epoch": 35.0,
1176
+ "eval_rundkast_loss": 0.48370474576950073,
1177
+ "eval_rundkast_runtime": 33.7199,
1178
+ "eval_rundkast_samples_per_second": 39.887,
1179
+ "eval_rundkast_steps_per_second": 1.275,
1180
+ "eval_rundkast_wer": 0.21006999681832644,
1181
+ "step": 3080
1182
+ },
1183
+ {
1184
+ "epoch": 35.0,
1185
+ "eval_nb_samtale_loss": 0.4662381708621979,
1186
+ "eval_nb_samtale_runtime": 38.5158,
1187
+ "eval_nb_samtale_samples_per_second": 13.838,
1188
+ "eval_nb_samtale_steps_per_second": 0.441,
1189
+ "eval_nb_samtale_wer": 0.17635091145833334,
1190
+ "step": 3080
1191
+ },
1192
+ {
1193
+ "epoch": 35.0,
1194
+ "eval_bigbrother_loss": 2.249934434890747,
1195
+ "eval_bigbrother_runtime": 41.5219,
1196
+ "eval_bigbrother_samples_per_second": 32.995,
1197
+ "eval_bigbrother_steps_per_second": 1.036,
1198
+ "eval_bigbrother_wer": 0.5461210021751389,
1199
+ "step": 3080
1200
+ },
1201
+ {
1202
+ "epoch": 36.0,
1203
+ "grad_norm": 2.232914686203003,
1204
+ "learning_rate": 1.7475247524752476e-05,
1205
+ "loss": 0.3169,
1206
+ "step": 3168
1207
+ },
1208
+ {
1209
+ "epoch": 36.0,
1210
+ "eval_rundkast_loss": 0.4873496890068054,
1211
+ "eval_rundkast_runtime": 33.9486,
1212
+ "eval_rundkast_samples_per_second": 39.619,
1213
+ "eval_rundkast_steps_per_second": 1.267,
1214
+ "eval_rundkast_wer": 0.20935412026726058,
1215
+ "step": 3168
1216
+ },
1217
+ {
1218
+ "epoch": 36.0,
1219
+ "eval_nb_samtale_loss": 0.4695265591144562,
1220
+ "eval_nb_samtale_runtime": 37.5912,
1221
+ "eval_nb_samtale_samples_per_second": 14.179,
1222
+ "eval_nb_samtale_steps_per_second": 0.452,
1223
+ "eval_nb_samtale_wer": 0.175048828125,
1224
+ "step": 3168
1225
+ },
1226
+ {
1227
+ "epoch": 36.0,
1228
+ "eval_bigbrother_loss": 2.2812929153442383,
1229
+ "eval_bigbrother_runtime": 42.1676,
1230
+ "eval_bigbrother_samples_per_second": 32.489,
1231
+ "eval_bigbrother_steps_per_second": 1.02,
1232
+ "eval_bigbrother_wer": 0.545879320067671,
1233
+ "step": 3168
1234
+ },
1235
+ {
1236
+ "epoch": 37.0,
1237
+ "grad_norm": 1.227518916130066,
1238
+ "learning_rate": 1.311881188118812e-05,
1239
+ "loss": 0.3205,
1240
+ "step": 3256
1241
+ },
1242
+ {
1243
+ "epoch": 37.0,
1244
+ "eval_rundkast_loss": 0.4848962128162384,
1245
+ "eval_rundkast_runtime": 34.1615,
1246
+ "eval_rundkast_samples_per_second": 39.372,
1247
+ "eval_rundkast_steps_per_second": 1.259,
1248
+ "eval_rundkast_wer": 0.21046770601336304,
1249
+ "step": 3256
1250
+ },
1251
+ {
1252
+ "epoch": 37.0,
1253
+ "eval_nb_samtale_loss": 0.45955517888069153,
1254
+ "eval_nb_samtale_runtime": 37.7962,
1255
+ "eval_nb_samtale_samples_per_second": 14.102,
1256
+ "eval_nb_samtale_steps_per_second": 0.45,
1257
+ "eval_nb_samtale_wer": 0.17521158854166666,
1258
+ "step": 3256
1259
+ },
1260
+ {
1261
+ "epoch": 37.0,
1262
+ "eval_bigbrother_loss": 2.2612295150756836,
1263
+ "eval_bigbrother_runtime": 41.8835,
1264
+ "eval_bigbrother_samples_per_second": 32.71,
1265
+ "eval_bigbrother_steps_per_second": 1.027,
1266
+ "eval_bigbrother_wer": 0.5478127769274148,
1267
+ "step": 3256
1268
+ },
1269
+ {
1270
+ "epoch": 38.0,
1271
+ "grad_norm": 1.3873769044876099,
1272
+ "learning_rate": 8.762376237623762e-06,
1273
+ "loss": 0.3164,
1274
+ "step": 3344
1275
+ },
1276
+ {
1277
+ "epoch": 38.0,
1278
+ "eval_rundkast_loss": 0.4849892556667328,
1279
+ "eval_rundkast_runtime": 33.7856,
1280
+ "eval_rundkast_samples_per_second": 39.81,
1281
+ "eval_rundkast_steps_per_second": 1.273,
1282
+ "eval_rundkast_wer": 0.20999045497931912,
1283
+ "step": 3344
1284
+ },
1285
+ {
1286
+ "epoch": 38.0,
1287
+ "eval_nb_samtale_loss": 0.45760098099708557,
1288
+ "eval_nb_samtale_runtime": 37.6736,
1289
+ "eval_nb_samtale_samples_per_second": 14.148,
1290
+ "eval_nb_samtale_steps_per_second": 0.451,
1291
+ "eval_nb_samtale_wer": 0.17350260416666666,
1292
+ "step": 3344
1293
+ },
1294
+ {
1295
+ "epoch": 38.0,
1296
+ "eval_bigbrother_loss": 2.2668609619140625,
1297
+ "eval_bigbrother_runtime": 42.1887,
1298
+ "eval_bigbrother_samples_per_second": 32.473,
1299
+ "eval_bigbrother_steps_per_second": 1.019,
1300
+ "eval_bigbrother_wer": 0.5437041811004591,
1301
+ "step": 3344
1302
+ },
1303
+ {
1304
+ "epoch": 39.0,
1305
+ "grad_norm": 1.6516066789627075,
1306
+ "learning_rate": 4.405940594059406e-06,
1307
+ "loss": 0.3145,
1308
+ "step": 3432
1309
+ },
1310
+ {
1311
+ "epoch": 39.0,
1312
+ "eval_rundkast_loss": 0.48802104592323303,
1313
+ "eval_rundkast_runtime": 34.1337,
1314
+ "eval_rundkast_samples_per_second": 39.404,
1315
+ "eval_rundkast_steps_per_second": 1.26,
1316
+ "eval_rundkast_wer": 0.20911549475023863,
1317
+ "step": 3432
1318
+ },
1319
+ {
1320
+ "epoch": 39.0,
1321
+ "eval_nb_samtale_loss": 0.4593607187271118,
1322
+ "eval_nb_samtale_runtime": 37.7724,
1323
+ "eval_nb_samtale_samples_per_second": 14.111,
1324
+ "eval_nb_samtale_steps_per_second": 0.45,
1325
+ "eval_nb_samtale_wer": 0.17390950520833334,
1326
+ "step": 3432
1327
+ },
1328
+ {
1329
+ "epoch": 39.0,
1330
+ "eval_bigbrother_loss": 2.2844576835632324,
1331
+ "eval_bigbrother_runtime": 41.9423,
1332
+ "eval_bigbrother_samples_per_second": 32.664,
1333
+ "eval_bigbrother_steps_per_second": 1.025,
1334
+ "eval_bigbrother_wer": 0.5430596954805446,
1335
+ "step": 3432
1336
+ },
1337
+ {
1338
+ "epoch": 40.0,
1339
+ "grad_norm": 1.1558164358139038,
1340
+ "learning_rate": 4.950495049504951e-08,
1341
+ "loss": 0.3122,
1342
+ "step": 3520
1343
+ },
1344
+ {
1345
+ "epoch": 40.0,
1346
+ "eval_rundkast_loss": 0.4869055449962616,
1347
+ "eval_rundkast_runtime": 33.9078,
1348
+ "eval_rundkast_samples_per_second": 39.666,
1349
+ "eval_rundkast_steps_per_second": 1.268,
1350
+ "eval_rundkast_wer": 0.20895641107222399,
1351
+ "step": 3520
1352
+ },
1353
+ {
1354
+ "epoch": 40.0,
1355
+ "eval_nb_samtale_loss": 0.4583870470523834,
1356
+ "eval_nb_samtale_runtime": 37.9869,
1357
+ "eval_nb_samtale_samples_per_second": 14.031,
1358
+ "eval_nb_samtale_steps_per_second": 0.448,
1359
+ "eval_nb_samtale_wer": 0.174072265625,
1360
+ "step": 3520
1361
+ },
1362
+ {
1363
+ "epoch": 40.0,
1364
+ "eval_bigbrother_loss": 2.280332565307617,
1365
+ "eval_bigbrother_runtime": 41.7851,
1366
+ "eval_bigbrother_samples_per_second": 32.787,
1367
+ "eval_bigbrother_steps_per_second": 1.029,
1368
+ "eval_bigbrother_wer": 0.5434624989929913,
1369
+ "step": 3520
1370
+ }
1371
+ ],
1372
+ "logging_steps": 500,
1373
+ "max_steps": 3520,
1374
+ "num_input_tokens_seen": 0,
1375
+ "num_train_epochs": 40,
1376
+ "save_steps": 500,
1377
+ "total_flos": 4.807600562770979e+19,
1378
+ "train_batch_size": 48,
1379
+ "trial_name": null,
1380
+ "trial_params": null
1381
+ }
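
Note on the trainer state above: the reported "best_metric" (0.17350260416666666) is the eval_nb_samtale_wer logged at step 3344 (epoch 38), which is why that checkpoint is flagged as best_model_checkpoint. A minimal sketch for pulling that out of the file programmatically; the local filename is a placeholder for wherever a copy of trainer_state.json is saved:

```python
# Sketch, not part of the commit: find the step with the lowest nb_samtale WER
# in a downloaded copy of trainer_state.json (placeholder path).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

wer_logs = [e for e in state["log_history"] if "eval_nb_samtale_wer" in e]
best = min(wer_logs, key=lambda e: e["eval_nb_samtale_wer"])
print(best["step"], best["eval_nb_samtale_wer"])  # 3344 0.17350260416666666
```
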
checkpoint-3520/training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ce5bbada60c6d2047edbe4ab0c2c6c8ea2d90d2d867f77eaa978b26a077a2a59
3
+ size 4984
config.json ADDED
@@ -0,0 +1,116 @@
1
+ {
2
+ "_name_or_path": "NbAiLab/nb-wav2vec2-300m-bokmaal",
3
+ "activation_dropout": 0.055,
4
+ "adapter_attn_dim": null,
5
+ "adapter_kernel_size": 3,
6
+ "adapter_stride": 2,
7
+ "add_adapter": false,
8
+ "apply_spec_augment": true,
9
+ "architectures": [
10
+ "Wav2Vec2ForCTC"
11
+ ],
12
+ "attention_dropout": 0.094,
13
+ "bos_token_id": 1,
14
+ "classifier_proj_size": 256,
15
+ "codevector_dim": 768,
16
+ "contrastive_logits_temperature": 0.1,
17
+ "conv_bias": true,
18
+ "conv_dim": [
19
+ 512,
20
+ 512,
21
+ 512,
22
+ 512,
23
+ 512,
24
+ 512,
25
+ 512
26
+ ],
27
+ "conv_kernel": [
28
+ 10,
29
+ 3,
30
+ 3,
31
+ 3,
32
+ 3,
33
+ 2,
34
+ 2
35
+ ],
36
+ "conv_stride": [
37
+ 5,
38
+ 2,
39
+ 2,
40
+ 2,
41
+ 2,
42
+ 2,
43
+ 2
44
+ ],
45
+ "ctc_loss_reduction": "mean",
46
+ "ctc_zero_infinity": true,
47
+ "diversity_loss_weight": 0.1,
48
+ "do_stable_layer_norm": true,
49
+ "eos_token_id": 2,
50
+ "feat_extract_activation": "gelu",
51
+ "feat_extract_dropout": 0.0,
52
+ "feat_extract_norm": "layer",
53
+ "feat_proj_dropout": 0.04,
54
+ "feat_quantizer_dropout": 0.0,
55
+ "final_dropout": 0.0,
56
+ "hidden_act": "gelu",
57
+ "hidden_dropout": 0.047,
58
+ "hidden_size": 1024,
59
+ "initializer_range": 0.02,
60
+ "intermediate_size": 4096,
61
+ "layer_norm_eps": 1e-05,
62
+ "layerdrop": 0.041,
63
+ "mask_channel_length": 10,
64
+ "mask_channel_min_space": 1,
65
+ "mask_channel_other": 0.0,
66
+ "mask_channel_prob": 0.0,
67
+ "mask_channel_selection": "static",
68
+ "mask_feature_length": 64,
69
+ "mask_feature_min_masks": 0,
70
+ "mask_feature_prob": 0.25,
71
+ "mask_time_length": 10,
72
+ "mask_time_min_masks": 2,
73
+ "mask_time_min_space": 1,
74
+ "mask_time_other": 0.0,
75
+ "mask_time_prob": 0.082,
76
+ "mask_time_selection": "static",
77
+ "model_type": "wav2vec2",
78
+ "num_adapter_layers": 3,
79
+ "num_attention_heads": 16,
80
+ "num_codevector_groups": 2,
81
+ "num_codevectors_per_group": 320,
82
+ "num_conv_pos_embedding_groups": 16,
83
+ "num_conv_pos_embeddings": 128,
84
+ "num_feat_extract_layers": 7,
85
+ "num_hidden_layers": 24,
86
+ "num_negatives": 100,
87
+ "output_hidden_size": 1024,
88
+ "pad_token_id": 31,
89
+ "proj_codevector_dim": 768,
90
+ "tdnn_dilation": [
91
+ 1,
92
+ 2,
93
+ 3,
94
+ 1,
95
+ 1
96
+ ],
97
+ "tdnn_dim": [
98
+ 512,
99
+ 512,
100
+ 512,
101
+ 512,
102
+ 1500
103
+ ],
104
+ "tdnn_kernel": [
105
+ 5,
106
+ 3,
107
+ 3,
108
+ 1,
109
+ 1
110
+ ],
111
+ "torch_dtype": "float32",
112
+ "transformers_version": "4.38.1",
113
+ "use_weighted_layer_sum": false,
114
+ "vocab_size": 34,
115
+ "xvector_output_dim": 512
116
+ }
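
The config.json above describes a Wav2Vec2ForCTC model (24 transformer layers, hidden size 1024, 34-symbol CTC vocabulary) fine-tuned from NbAiLab/nb-wav2vec2-300m-bokmaal. A quick way to sanity-check those hyper-parameters without downloading the weights is to load only the config; "path/to/this/repo" below is a placeholder for the model id or a local clone:

```python
# Sketch: inspect the architecture hyper-parameters carried by config.json only.
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("path/to/this/repo")  # placeholder model id / path
print(cfg.model_type, cfg.num_hidden_layers, cfg.hidden_size, cfg.vocab_size)
# -> wav2vec2 24 1024 34
```
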
language_model/5gram.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7b41c24c63f2f0585bea83666369593f3b3e6d047f327a90f36ebca2c35ef0ff
3
+ size 4243671427
language_model/attrs.json ADDED
@@ -0,0 +1 @@
1
+ {"alpha": 0.5, "beta": 0.1, "unk_score_offset": -10.0, "score_boundary": true}
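
language_model/attrs.json holds the beam-search decoder settings that go with the 5-gram model: alpha is the language-model weight, beta the word-insertion bonus, plus an offset for unknown unigrams and a flag for scoring sentence boundaries. For illustration only, a sketch of how these values map onto pyctcdecode's build_ctcdecoder; the label list below is a stand-in alphabet, not this model's vocabulary:

```python
# Sketch only: how the attrs.json values map onto pyctcdecode arguments.
# The labels are a toy alphabet for illustration; the kenlm path points at the
# LFS-tracked 5-gram added in this commit.
from pyctcdecode import build_ctcdecoder

labels = [" ", "a", "b", "c"]  # stand-in alphabet, not the model's real one
decoder = build_ctcdecoder(
    labels,
    kenlm_model_path="language_model/5gram.bin",
    alpha=0.5,               # language-model weight (attrs.json)
    beta=0.1,                # word-insertion bonus (attrs.json)
    unk_score_offset=-10.0,  # penalty for unknown unigrams (attrs.json)
    lm_score_boundary=True,  # score sentence boundaries with the LM (attrs.json)
)
```

In practice there is no need to build this by hand: Wav2Vec2ProcessorWithLM reads these files itself, as sketched after preprocessor_config.json below.
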
language_model/unigrams.txt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ac3e71ca49838ca355df6fdcb8d89344a5a9bf9e1a76587cdf5df1367c19b9a9
3
+ size 16759269
model.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a49ebd1fbe915c75bc22fef6e8187c0457faa591865e0930c906d76125b301ec
3
+ size 1261946880
preprocessor_config.json ADDED
@@ -0,0 +1,10 @@
1
+ {
2
+ "do_normalize": true,
3
+ "feature_extractor_type": "Wav2Vec2FeatureExtractor",
4
+ "feature_size": 1,
5
+ "padding_side": "right",
6
+ "padding_value": 0,
7
+ "processor_class": "Wav2Vec2ProcessorWithLM",
8
+ "return_attention_mask": true,
9
+ "sampling_rate": 16000
10
+ }
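
preprocessor_config.json declares a Wav2Vec2FeatureExtractor wrapped in a Wav2Vec2ProcessorWithLM, i.e. decoding is expected to go through the KenLM-boosted beam search configured under language_model/. A hedged end-to-end sketch (requires pyctcdecode and kenlm; "path/to/this/repo" is a placeholder, and the silent waveform stands in for real 16 kHz mono audio):

```python
# Sketch: LM-boosted transcription with the files in this commit.
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2ProcessorWithLM

processor = Wav2Vec2ProcessorWithLM.from_pretrained("path/to/this/repo")  # placeholder
model = Wav2Vec2ForCTC.from_pretrained("path/to/this/repo")               # placeholder
model.eval()

speech = torch.zeros(16000).numpy()  # stand-in for one second of 16 kHz mono audio
inputs = processor(speech, sampling_rate=16000, return_tensors="pt")

with torch.no_grad():
    logits = model(inputs.input_values, attention_mask=inputs.attention_mask).logits

# batch_decode runs the 5-gram beam search parameterised by language_model/attrs.json.
print(processor.batch_decode(logits.numpy()).text)
```
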
runs/May30_19-26-04_idun-04-10/events.out.tfevents.1717090184.idun-04-10.272557.0 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4591c89250738dd35c3bbafac04cab1799e13a0058f1f66143201035b12823ad
3
+ size 59515
special_tokens_map.json ADDED
@@ -0,0 +1,6 @@
1
+ {
2
+ "bos_token": "<s>",
3
+ "eos_token": "</s>",
4
+ "pad_token": "[PAD]",
5
+ "unk_token": "[UNK]"
6
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,47 @@
1
+ {
2
+ "added_tokens_decoder": {
3
+ "30": {
4
+ "content": "[UNK]",
5
+ "lstrip": true,
6
+ "normalized": false,
7
+ "rstrip": true,
8
+ "single_word": false,
9
+ "special": false
10
+ },
11
+ "31": {
12
+ "content": "[PAD]",
13
+ "lstrip": true,
14
+ "normalized": false,
15
+ "rstrip": true,
16
+ "single_word": false,
17
+ "special": false
18
+ },
19
+ "32": {
20
+ "content": "<s>",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "33": {
28
+ "content": "</s>",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ }
35
+ },
36
+ "bos_token": "<s>",
37
+ "clean_up_tokenization_spaces": true,
38
+ "do_lower_case": false,
39
+ "eos_token": "</s>",
40
+ "model_max_length": 1000000000000000019884624838656,
41
+ "pad_token": "[PAD]",
42
+ "replace_word_delimiter_char": " ",
43
+ "target_lang": null,
44
+ "tokenizer_class": "Wav2Vec2CTCTokenizer",
45
+ "unk_token": "[UNK]",
46
+ "word_delimiter_token": "|"
47
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ce5bbada60c6d2047edbe4ab0c2c6c8ea2d90d2d867f77eaa978b26a077a2a59
3
+ size 4984
vocab.json ADDED
@@ -0,0 +1,34 @@
1
+ {
2
+ "[PAD]": 31,
3
+ "[UNK]": 30,
4
+ "a": 1,
5
+ "b": 2,
6
+ "c": 3,
7
+ "d": 4,
8
+ "e": 5,
9
+ "f": 6,
10
+ "g": 7,
11
+ "h": 8,
12
+ "i": 9,
13
+ "j": 10,
14
+ "k": 11,
15
+ "l": 12,
16
+ "m": 13,
17
+ "n": 14,
18
+ "o": 15,
19
+ "p": 16,
20
+ "q": 17,
21
+ "r": 18,
22
+ "s": 19,
23
+ "t": 20,
24
+ "u": 21,
25
+ "v": 22,
26
+ "w": 23,
27
+ "x": 24,
28
+ "y": 25,
29
+ "z": 26,
30
+ "|": 0,
31
+ "å": 27,
32
+ "æ": 28,
33
+ "ø": 29
34
+ }
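
vocab.json rounds out the character-level CTC vocabulary: "|" (id 0) is the word delimiter and "[PAD]" (id 31) matches pad_token_id in config.json, which in the usual Hugging Face Wav2Vec2 setup also serves as the CTC blank. If only this file is at hand, the tokenizer can be rebuilt directly; the filename below is a placeholder for a local copy:

```python
# Sketch: rebuild the CTC tokenizer from a local copy of vocab.json.
from transformers import Wav2Vec2CTCTokenizer

tokenizer = Wav2Vec2CTCTokenizer(
    "vocab.json",               # placeholder local path to the file above
    unk_token="[UNK]",
    pad_token="[PAD]",
    word_delimiter_token="|",
)
print(tokenizer("på norsk").input_ids)  # character ids; spaces map through "|"
```
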