Tflatval committed on
Commit
86e19f1
1 Parent(s): 4e99f41

Upload folder using huggingface_hub

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+ language_model/unigrams.txt filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,4 @@
+ {
+ "</s>": 33,
+ "<s>": 32
+ }
alphabet.json ADDED
@@ -0,0 +1 @@
+ {"labels": [" ", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", "\u00e5", "\u00e6", "\u00f8", "\u2047", "", "<s>", "</s>"], "is_bpe": false}
checkpoint-13572/config.json ADDED
@@ -0,0 +1,116 @@
+ {
+ "_name_or_path": "NbAiLab/nb-wav2vec2-300m-bokmaal",
+ "activation_dropout": 0.055,
+ "adapter_attn_dim": null,
+ "adapter_kernel_size": 3,
+ "adapter_stride": 2,
+ "add_adapter": false,
+ "apply_spec_augment": true,
+ "architectures": [
+ "Wav2Vec2ForCTC"
+ ],
+ "attention_dropout": 0.094,
+ "bos_token_id": 1,
+ "classifier_proj_size": 256,
+ "codevector_dim": 768,
+ "contrastive_logits_temperature": 0.1,
+ "conv_bias": true,
+ "conv_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512
+ ],
+ "conv_kernel": [
+ 10,
+ 3,
+ 3,
+ 3,
+ 3,
+ 2,
+ 2
+ ],
+ "conv_stride": [
+ 5,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2
+ ],
+ "ctc_loss_reduction": "mean",
+ "ctc_zero_infinity": true,
+ "diversity_loss_weight": 0.1,
+ "do_stable_layer_norm": true,
+ "eos_token_id": 2,
+ "feat_extract_activation": "gelu",
+ "feat_extract_dropout": 0.0,
+ "feat_extract_norm": "layer",
+ "feat_proj_dropout": 0.04,
+ "feat_quantizer_dropout": 0.0,
+ "final_dropout": 0.0,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.047,
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 4096,
+ "layer_norm_eps": 1e-05,
+ "layerdrop": 0.041,
+ "mask_channel_length": 10,
+ "mask_channel_min_space": 1,
+ "mask_channel_other": 0.0,
+ "mask_channel_prob": 0.0,
+ "mask_channel_selection": "static",
+ "mask_feature_length": 64,
+ "mask_feature_min_masks": 0,
+ "mask_feature_prob": 0.25,
+ "mask_time_length": 10,
+ "mask_time_min_masks": 2,
+ "mask_time_min_space": 1,
+ "mask_time_other": 0.0,
+ "mask_time_prob": 0.082,
+ "mask_time_selection": "static",
+ "model_type": "wav2vec2",
+ "num_adapter_layers": 3,
+ "num_attention_heads": 16,
+ "num_codevector_groups": 2,
+ "num_codevectors_per_group": 320,
+ "num_conv_pos_embedding_groups": 16,
+ "num_conv_pos_embeddings": 128,
+ "num_feat_extract_layers": 7,
+ "num_hidden_layers": 24,
+ "num_negatives": 100,
+ "output_hidden_size": 1024,
+ "pad_token_id": 31,
+ "proj_codevector_dim": 768,
+ "tdnn_dilation": [
+ 1,
+ 2,
+ 3,
+ 1,
+ 1
+ ],
+ "tdnn_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 1500
+ ],
+ "tdnn_kernel": [
+ 5,
+ 3,
+ 3,
+ 1,
+ 1
+ ],
+ "torch_dtype": "float32",
+ "transformers_version": "4.38.1",
+ "use_weighted_layer_sum": false,
+ "vocab_size": 34,
+ "xvector_output_dim": 512
+ }
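As a quick consistency check (not part of the commit), the saved config can be loaded with transformers to confirm that vocab_size (34) matches the label list in alphabet.json and that pad_token_id (31) points at the blank label. A minimal sketch, assuming the files sit in the paths shown in this commit:

```python
# Sketch: cross-check the checkpoint config against alphabet.json.
import json

from transformers import Wav2Vec2Config

config = Wav2Vec2Config.from_pretrained("checkpoint-13572")
with open("alphabet.json", encoding="utf-8") as f:
    labels = json.load(f)["labels"]

assert config.vocab_size == len(labels) == 34
assert labels[config.pad_token_id] == ""  # index 31 holds the CTC blank label
print(config.model_type, config.hidden_size, config.num_hidden_layers)
```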
checkpoint-13572/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fa5e89f40e056dff1b1d3f46fa4de904588460556c26f78f3f3db7fae7c5dc81
+ size 1261946880
checkpoint-13572/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:65dcc24eff766c546977d80e1a1349c93494fcb8bbddbba130adf211a7bcf017
+ size 2490438582
checkpoint-13572/preprocessor_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "do_normalize": true,
+ "feature_extractor_type": "Wav2Vec2FeatureExtractor",
+ "feature_size": 1,
+ "padding_side": "right",
+ "padding_value": 0,
+ "processor_class": "Wav2Vec2ProcessorWithLM",
+ "return_attention_mask": true,
+ "sampling_rate": 16000
+ }
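The preprocessor config declares 16 kHz input and the Wav2Vec2ProcessorWithLM processor class. A minimal inference sketch under those assumptions (not part of the commit; the checkpoint directory must also contain alphabet.json and the language_model/ folder, and the waveform here is a placeholder):

```python
# Sketch: transcribe a 16 kHz waveform with the CTC model and LM-boosted decoding.
import numpy as np
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2ProcessorWithLM

processor = Wav2Vec2ProcessorWithLM.from_pretrained("checkpoint-13572")
model = Wav2Vec2ForCTC.from_pretrained("checkpoint-13572")

speech = np.zeros(16_000, dtype=np.float32)  # placeholder: one second of silence at 16 kHz
inputs = processor(speech, sampling_rate=16_000, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits

print(processor.batch_decode(logits.numpy()).text[0])
```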
checkpoint-13572/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b3f949b3855f2135651b82206bc63111a4640ba31c5f197f713dea3053c45a6c
+ size 14308
checkpoint-13572/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:34202538ea2574b51c6920cb7cade8b1c5f272bbc2be6db487eb0484013a8713
+ size 1064
checkpoint-13572/trainer_state.json ADDED
@@ -0,0 +1,1245 @@
1
+ {
2
+ "best_metric": 0.404656408603883,
3
+ "best_model_checkpoint": "/cluster/home/torstefl/Master/saved_model/W2V/single/BB/30.05/checkpoint-13572",
4
+ "epoch": 36.0,
5
+ "eval_steps": 500,
6
+ "global_step": 13572,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 1.0,
13
+ "grad_norm": 6.709790229797363,
14
+ "learning_rate": 2.5e-05,
15
+ "loss": 2.4835,
16
+ "step": 377
17
+ },
18
+ {
19
+ "epoch": 1.0,
20
+ "eval_rundkast_loss": 0.419185996055603,
21
+ "eval_rundkast_runtime": 33.4408,
22
+ "eval_rundkast_samples_per_second": 40.22,
23
+ "eval_rundkast_steps_per_second": 1.286,
24
+ "eval_rundkast_wer": 0.24005727012408526,
25
+ "step": 377
26
+ },
27
+ {
28
+ "epoch": 1.0,
29
+ "eval_nb_samtale_loss": 0.5932812094688416,
30
+ "eval_nb_samtale_runtime": 38.8625,
31
+ "eval_nb_samtale_samples_per_second": 13.715,
32
+ "eval_nb_samtale_steps_per_second": 0.437,
33
+ "eval_nb_samtale_wer": 0.2626953125,
34
+ "step": 377
35
+ },
36
+ {
37
+ "epoch": 1.0,
38
+ "eval_bigbrother_loss": 1.838633418083191,
39
+ "eval_bigbrother_runtime": 41.4238,
40
+ "eval_bigbrother_samples_per_second": 33.073,
41
+ "eval_bigbrother_steps_per_second": 1.038,
42
+ "eval_bigbrother_wer": 0.5936518166438411,
43
+ "step": 377
44
+ },
45
+ {
46
+ "epoch": 2.0,
47
+ "grad_norm": 8.728084564208984,
48
+ "learning_rate": 5.013333333333333e-05,
49
+ "loss": 2.0585,
50
+ "step": 754
51
+ },
52
+ {
53
+ "epoch": 2.0,
54
+ "eval_rundkast_loss": 0.40199896693229675,
55
+ "eval_rundkast_runtime": 33.3039,
56
+ "eval_rundkast_samples_per_second": 40.386,
57
+ "eval_rundkast_steps_per_second": 1.291,
58
+ "eval_rundkast_wer": 0.2357620108176901,
59
+ "step": 754
60
+ },
61
+ {
62
+ "epoch": 2.0,
63
+ "eval_nb_samtale_loss": 0.5779798030853271,
64
+ "eval_nb_samtale_runtime": 37.921,
65
+ "eval_nb_samtale_samples_per_second": 14.056,
66
+ "eval_nb_samtale_steps_per_second": 0.448,
67
+ "eval_nb_samtale_wer": 0.253173828125,
68
+ "step": 754
69
+ },
70
+ {
71
+ "epoch": 2.0,
72
+ "eval_bigbrother_loss": 1.5922973155975342,
73
+ "eval_bigbrother_runtime": 41.1151,
74
+ "eval_bigbrother_samples_per_second": 33.321,
75
+ "eval_bigbrother_steps_per_second": 1.046,
76
+ "eval_bigbrother_wer": 0.5574800612261339,
77
+ "step": 754
78
+ },
79
+ {
80
+ "epoch": 3.0,
81
+ "grad_norm": 6.347009658813477,
82
+ "learning_rate": 7.526666666666668e-05,
83
+ "loss": 1.9142,
84
+ "step": 1131
85
+ },
86
+ {
87
+ "epoch": 3.0,
88
+ "eval_rundkast_loss": 0.3938983082771301,
89
+ "eval_rundkast_runtime": 33.5062,
90
+ "eval_rundkast_samples_per_second": 40.142,
91
+ "eval_rundkast_steps_per_second": 1.283,
92
+ "eval_rundkast_wer": 0.24029589564110723,
93
+ "step": 1131
94
+ },
95
+ {
96
+ "epoch": 3.0,
97
+ "eval_nb_samtale_loss": 0.5637267827987671,
98
+ "eval_nb_samtale_runtime": 38.0506,
99
+ "eval_nb_samtale_samples_per_second": 14.008,
100
+ "eval_nb_samtale_steps_per_second": 0.447,
101
+ "eval_nb_samtale_wer": 0.2652180989583333,
102
+ "step": 1131
103
+ },
104
+ {
105
+ "epoch": 3.0,
106
+ "eval_bigbrother_loss": 1.4027199745178223,
107
+ "eval_bigbrother_runtime": 41.4816,
108
+ "eval_bigbrother_samples_per_second": 33.027,
109
+ "eval_bigbrother_steps_per_second": 1.037,
110
+ "eval_bigbrother_wer": 0.5358897929589946,
111
+ "step": 1131
112
+ },
113
+ {
114
+ "epoch": 4.0,
115
+ "grad_norm": 13.580578804016113,
116
+ "learning_rate": 9.99558173784978e-05,
117
+ "loss": 1.816,
118
+ "step": 1508
119
+ },
120
+ {
121
+ "epoch": 4.0,
122
+ "eval_rundkast_loss": 0.3921523690223694,
123
+ "eval_rundkast_runtime": 33.1392,
124
+ "eval_rundkast_samples_per_second": 40.586,
125
+ "eval_rundkast_steps_per_second": 1.298,
126
+ "eval_rundkast_wer": 0.24315940184537066,
127
+ "step": 1508
128
+ },
129
+ {
130
+ "epoch": 4.0,
131
+ "eval_nb_samtale_loss": 0.5375664830207825,
132
+ "eval_nb_samtale_runtime": 37.7821,
133
+ "eval_nb_samtale_samples_per_second": 14.107,
134
+ "eval_nb_samtale_steps_per_second": 0.45,
135
+ "eval_nb_samtale_wer": 0.262939453125,
136
+ "step": 1508
137
+ },
138
+ {
139
+ "epoch": 4.0,
140
+ "eval_bigbrother_loss": 1.2783823013305664,
141
+ "eval_bigbrother_runtime": 41.1302,
142
+ "eval_bigbrother_samples_per_second": 33.309,
143
+ "eval_bigbrother_steps_per_second": 1.045,
144
+ "eval_bigbrother_wer": 0.5196165310561508,
145
+ "step": 1508
146
+ },
147
+ {
148
+ "epoch": 5.0,
149
+ "grad_norm": 5.716007709503174,
150
+ "learning_rate": 9.717967599410898e-05,
151
+ "loss": 1.7424,
152
+ "step": 1885
153
+ },
154
+ {
155
+ "epoch": 5.0,
156
+ "eval_rundkast_loss": 0.40847668051719666,
157
+ "eval_rundkast_runtime": 33.007,
158
+ "eval_rundkast_samples_per_second": 40.749,
159
+ "eval_rundkast_steps_per_second": 1.303,
160
+ "eval_rundkast_wer": 0.23870505886096086,
161
+ "step": 1885
162
+ },
163
+ {
164
+ "epoch": 5.0,
165
+ "eval_nb_samtale_loss": 0.5756209492683411,
166
+ "eval_nb_samtale_runtime": 37.9326,
167
+ "eval_nb_samtale_samples_per_second": 14.051,
168
+ "eval_nb_samtale_steps_per_second": 0.448,
169
+ "eval_nb_samtale_wer": 0.2607421875,
170
+ "step": 1885
171
+ },
172
+ {
173
+ "epoch": 5.0,
174
+ "eval_bigbrother_loss": 1.3229293823242188,
175
+ "eval_bigbrother_runtime": 40.9231,
176
+ "eval_bigbrother_samples_per_second": 33.477,
177
+ "eval_bigbrother_steps_per_second": 1.051,
178
+ "eval_bigbrother_wer": 0.5068879400628373,
179
+ "step": 1885
180
+ },
181
+ {
182
+ "epoch": 6.0,
183
+ "grad_norm": 4.037384510040283,
184
+ "learning_rate": 9.440353460972018e-05,
185
+ "loss": 1.6619,
186
+ "step": 2262
187
+ },
188
+ {
189
+ "epoch": 6.0,
190
+ "eval_rundkast_loss": 0.4441112279891968,
191
+ "eval_rundkast_runtime": 33.7921,
192
+ "eval_rundkast_samples_per_second": 39.802,
193
+ "eval_rundkast_steps_per_second": 1.272,
194
+ "eval_rundkast_wer": 0.24315940184537066,
195
+ "step": 2262
196
+ },
197
+ {
198
+ "epoch": 6.0,
199
+ "eval_nb_samtale_loss": 0.6132481694221497,
200
+ "eval_nb_samtale_runtime": 38.4613,
201
+ "eval_nb_samtale_samples_per_second": 13.858,
202
+ "eval_nb_samtale_steps_per_second": 0.442,
203
+ "eval_nb_samtale_wer": 0.2744140625,
204
+ "step": 2262
205
+ },
206
+ {
207
+ "epoch": 6.0,
208
+ "eval_bigbrother_loss": 1.3384240865707397,
209
+ "eval_bigbrother_runtime": 41.3107,
210
+ "eval_bigbrother_samples_per_second": 33.163,
211
+ "eval_bigbrother_steps_per_second": 1.041,
212
+ "eval_bigbrother_wer": 0.5068879400628373,
213
+ "step": 2262
214
+ },
215
+ {
216
+ "epoch": 7.0,
217
+ "grad_norm": 8.110424995422363,
218
+ "learning_rate": 9.163475699558174e-05,
219
+ "loss": 1.6139,
220
+ "step": 2639
221
+ },
222
+ {
223
+ "epoch": 7.0,
224
+ "eval_rundkast_loss": 0.45466309785842896,
225
+ "eval_rundkast_runtime": 33.255,
226
+ "eval_rundkast_samples_per_second": 40.445,
227
+ "eval_rundkast_steps_per_second": 1.293,
228
+ "eval_rundkast_wer": 0.24713649379573654,
229
+ "step": 2639
230
+ },
231
+ {
232
+ "epoch": 7.0,
233
+ "eval_nb_samtale_loss": 0.5841706991195679,
234
+ "eval_nb_samtale_runtime": 38.3016,
235
+ "eval_nb_samtale_samples_per_second": 13.916,
236
+ "eval_nb_samtale_steps_per_second": 0.444,
237
+ "eval_nb_samtale_wer": 0.2757975260416667,
238
+ "step": 2639
239
+ },
240
+ {
241
+ "epoch": 7.0,
242
+ "eval_bigbrother_loss": 1.338714838027954,
243
+ "eval_bigbrother_runtime": 41.6281,
244
+ "eval_bigbrother_samples_per_second": 32.91,
245
+ "eval_bigbrother_steps_per_second": 1.033,
246
+ "eval_bigbrother_wer": 0.49625392733424634,
247
+ "step": 2639
248
+ },
249
+ {
250
+ "epoch": 8.0,
251
+ "grad_norm": 19.2949275970459,
252
+ "learning_rate": 8.885861561119293e-05,
253
+ "loss": 1.5567,
254
+ "step": 3016
255
+ },
256
+ {
257
+ "epoch": 8.0,
258
+ "eval_rundkast_loss": 0.45816686749458313,
259
+ "eval_rundkast_runtime": 33.2799,
260
+ "eval_rundkast_samples_per_second": 40.415,
261
+ "eval_rundkast_steps_per_second": 1.292,
262
+ "eval_rundkast_wer": 0.25095450206808784,
263
+ "step": 3016
264
+ },
265
+ {
266
+ "epoch": 8.0,
267
+ "eval_nb_samtale_loss": 0.5909866690635681,
268
+ "eval_nb_samtale_runtime": 37.7066,
269
+ "eval_nb_samtale_samples_per_second": 14.135,
270
+ "eval_nb_samtale_steps_per_second": 0.451,
271
+ "eval_nb_samtale_wer": 0.2709147135416667,
272
+ "step": 3016
273
+ },
274
+ {
275
+ "epoch": 8.0,
276
+ "eval_bigbrother_loss": 1.2419942617416382,
277
+ "eval_bigbrother_runtime": 41.2436,
278
+ "eval_bigbrother_samples_per_second": 33.217,
279
+ "eval_bigbrother_steps_per_second": 1.043,
280
+ "eval_bigbrother_wer": 0.49238701361475873,
281
+ "step": 3016
282
+ },
283
+ {
284
+ "epoch": 9.0,
285
+ "grad_norm": 4.614249229431152,
286
+ "learning_rate": 8.608247422680413e-05,
287
+ "loss": 1.4942,
288
+ "step": 3393
289
+ },
290
+ {
291
+ "epoch": 9.0,
292
+ "eval_rundkast_loss": 0.4503733217716217,
293
+ "eval_rundkast_runtime": 33.2408,
294
+ "eval_rundkast_samples_per_second": 40.462,
295
+ "eval_rundkast_steps_per_second": 1.294,
296
+ "eval_rundkast_wer": 0.2419662742602609,
297
+ "step": 3393
298
+ },
299
+ {
300
+ "epoch": 9.0,
301
+ "eval_nb_samtale_loss": 0.5738528370857239,
302
+ "eval_nb_samtale_runtime": 37.6697,
303
+ "eval_nb_samtale_samples_per_second": 14.149,
304
+ "eval_nb_samtale_steps_per_second": 0.451,
305
+ "eval_nb_samtale_wer": 0.2655436197916667,
306
+ "step": 3393
307
+ },
308
+ {
309
+ "epoch": 9.0,
310
+ "eval_bigbrother_loss": 1.207922101020813,
311
+ "eval_bigbrother_runtime": 41.3161,
312
+ "eval_bigbrother_samples_per_second": 33.159,
313
+ "eval_bigbrother_steps_per_second": 1.041,
314
+ "eval_bigbrother_wer": 0.4722468379924273,
315
+ "step": 3393
316
+ },
317
+ {
318
+ "epoch": 10.0,
319
+ "grad_norm": 10.218366622924805,
320
+ "learning_rate": 8.330633284241532e-05,
321
+ "loss": 1.4412,
322
+ "step": 3770
323
+ },
324
+ {
325
+ "epoch": 10.0,
326
+ "eval_rundkast_loss": 0.4589375853538513,
327
+ "eval_rundkast_runtime": 33.4846,
328
+ "eval_rundkast_samples_per_second": 40.168,
329
+ "eval_rundkast_steps_per_second": 1.284,
330
+ "eval_rundkast_wer": 0.2531816735602927,
331
+ "step": 3770
332
+ },
333
+ {
334
+ "epoch": 10.0,
335
+ "eval_nb_samtale_loss": 0.5715627670288086,
336
+ "eval_nb_samtale_runtime": 37.8642,
337
+ "eval_nb_samtale_samples_per_second": 14.077,
338
+ "eval_nb_samtale_steps_per_second": 0.449,
339
+ "eval_nb_samtale_wer": 0.2689615885416667,
340
+ "step": 3770
341
+ },
342
+ {
343
+ "epoch": 10.0,
344
+ "eval_bigbrother_loss": 1.1500192880630493,
345
+ "eval_bigbrother_runtime": 41.3376,
346
+ "eval_bigbrother_samples_per_second": 33.142,
347
+ "eval_bigbrother_steps_per_second": 1.04,
348
+ "eval_bigbrother_wer": 0.46588254249577055,
349
+ "step": 3770
350
+ },
351
+ {
352
+ "epoch": 11.0,
353
+ "grad_norm": 3.9760067462921143,
354
+ "learning_rate": 8.053019145802652e-05,
355
+ "loss": 1.4267,
356
+ "step": 4147
357
+ },
358
+ {
359
+ "epoch": 11.0,
360
+ "eval_rundkast_loss": 0.4483039081096649,
361
+ "eval_rundkast_runtime": 33.8508,
362
+ "eval_rundkast_samples_per_second": 39.733,
363
+ "eval_rundkast_steps_per_second": 1.27,
364
+ "eval_rundkast_wer": 0.25015908367801465,
365
+ "step": 4147
366
+ },
367
+ {
368
+ "epoch": 11.0,
369
+ "eval_nb_samtale_loss": 0.5839167237281799,
370
+ "eval_nb_samtale_runtime": 38.2514,
371
+ "eval_nb_samtale_samples_per_second": 13.934,
372
+ "eval_nb_samtale_steps_per_second": 0.444,
373
+ "eval_nb_samtale_wer": 0.2657063802083333,
374
+ "step": 4147
375
+ },
376
+ {
377
+ "epoch": 11.0,
378
+ "eval_bigbrother_loss": 1.246797800064087,
379
+ "eval_bigbrother_runtime": 41.8091,
380
+ "eval_bigbrother_samples_per_second": 32.768,
381
+ "eval_bigbrother_steps_per_second": 1.028,
382
+ "eval_bigbrother_wer": 0.45669862241198744,
383
+ "step": 4147
384
+ },
385
+ {
386
+ "epoch": 12.0,
387
+ "grad_norm": 4.7413763999938965,
388
+ "learning_rate": 7.77540500736377e-05,
389
+ "loss": 1.3792,
390
+ "step": 4524
391
+ },
392
+ {
393
+ "epoch": 12.0,
394
+ "eval_rundkast_loss": 0.4521505832672119,
395
+ "eval_rundkast_runtime": 33.7711,
396
+ "eval_rundkast_samples_per_second": 39.827,
397
+ "eval_rundkast_steps_per_second": 1.273,
398
+ "eval_rundkast_wer": 0.24872733057588292,
399
+ "step": 4524
400
+ },
401
+ {
402
+ "epoch": 12.0,
403
+ "eval_nb_samtale_loss": 0.5718214511871338,
404
+ "eval_nb_samtale_runtime": 37.7574,
405
+ "eval_nb_samtale_samples_per_second": 14.116,
406
+ "eval_nb_samtale_steps_per_second": 0.45,
407
+ "eval_nb_samtale_wer": 0.2674967447916667,
408
+ "step": 4524
409
+ },
410
+ {
411
+ "epoch": 12.0,
412
+ "eval_bigbrother_loss": 1.1487047672271729,
413
+ "eval_bigbrother_runtime": 41.46,
414
+ "eval_bigbrother_samples_per_second": 33.044,
415
+ "eval_bigbrother_steps_per_second": 1.037,
416
+ "eval_bigbrother_wer": 0.4530733907999678,
417
+ "step": 4524
418
+ },
419
+ {
420
+ "epoch": 13.0,
421
+ "grad_norm": 5.937889099121094,
422
+ "learning_rate": 7.49779086892489e-05,
423
+ "loss": 1.3269,
424
+ "step": 4901
425
+ },
426
+ {
427
+ "epoch": 13.0,
428
+ "eval_rundkast_loss": 0.483146995306015,
429
+ "eval_rundkast_runtime": 33.3218,
430
+ "eval_rundkast_samples_per_second": 40.364,
431
+ "eval_rundkast_steps_per_second": 1.29,
432
+ "eval_rundkast_wer": 0.26002227171492204,
433
+ "step": 4901
434
+ },
435
+ {
436
+ "epoch": 13.0,
437
+ "eval_nb_samtale_loss": 0.6045836806297302,
438
+ "eval_nb_samtale_runtime": 37.3201,
439
+ "eval_nb_samtale_samples_per_second": 14.282,
440
+ "eval_nb_samtale_steps_per_second": 0.456,
441
+ "eval_nb_samtale_wer": 0.2681477864583333,
442
+ "step": 4901
443
+ },
444
+ {
445
+ "epoch": 13.0,
446
+ "eval_bigbrother_loss": 1.1842252016067505,
447
+ "eval_bigbrother_runtime": 41.1883,
448
+ "eval_bigbrother_samples_per_second": 33.262,
449
+ "eval_bigbrother_steps_per_second": 1.044,
450
+ "eval_bigbrother_wer": 0.45057600902279865,
451
+ "step": 4901
452
+ },
453
+ {
454
+ "epoch": 14.0,
455
+ "grad_norm": 6.761897087097168,
456
+ "learning_rate": 7.22017673048601e-05,
457
+ "loss": 1.2988,
458
+ "step": 5278
459
+ },
460
+ {
461
+ "epoch": 14.0,
462
+ "eval_rundkast_loss": 0.5177704691886902,
463
+ "eval_rundkast_runtime": 33.7773,
464
+ "eval_rundkast_samples_per_second": 39.82,
465
+ "eval_rundkast_steps_per_second": 1.273,
466
+ "eval_rundkast_wer": 0.24443207126948774,
467
+ "step": 5278
468
+ },
469
+ {
470
+ "epoch": 14.0,
471
+ "eval_nb_samtale_loss": 0.6508249044418335,
472
+ "eval_nb_samtale_runtime": 37.9055,
473
+ "eval_nb_samtale_samples_per_second": 14.061,
474
+ "eval_nb_samtale_steps_per_second": 0.448,
475
+ "eval_nb_samtale_wer": 0.2630208333333333,
476
+ "step": 5278
477
+ },
478
+ {
479
+ "epoch": 14.0,
480
+ "eval_bigbrother_loss": 1.2824336290359497,
481
+ "eval_bigbrother_runtime": 41.3751,
482
+ "eval_bigbrother_samples_per_second": 33.112,
483
+ "eval_bigbrother_steps_per_second": 1.039,
484
+ "eval_bigbrother_wer": 0.4433255457987594,
485
+ "step": 5278
486
+ },
487
+ {
488
+ "epoch": 15.0,
489
+ "grad_norm": 8.300821304321289,
490
+ "learning_rate": 6.942562592047128e-05,
491
+ "loss": 1.2819,
492
+ "step": 5655
493
+ },
494
+ {
495
+ "epoch": 15.0,
496
+ "eval_rundkast_loss": 0.47504347562789917,
497
+ "eval_rundkast_runtime": 33.2023,
498
+ "eval_rundkast_samples_per_second": 40.509,
499
+ "eval_rundkast_steps_per_second": 1.295,
500
+ "eval_rundkast_wer": 0.24737511931275852,
501
+ "step": 5655
502
+ },
503
+ {
504
+ "epoch": 15.0,
505
+ "eval_nb_samtale_loss": 0.6060231924057007,
506
+ "eval_nb_samtale_runtime": 38.0225,
507
+ "eval_nb_samtale_samples_per_second": 14.018,
508
+ "eval_nb_samtale_steps_per_second": 0.447,
509
+ "eval_nb_samtale_wer": 0.263427734375,
510
+ "step": 5655
511
+ },
512
+ {
513
+ "epoch": 15.0,
514
+ "eval_bigbrother_loss": 1.193629503250122,
515
+ "eval_bigbrother_runtime": 41.725,
516
+ "eval_bigbrother_samples_per_second": 32.834,
517
+ "eval_bigbrother_steps_per_second": 1.031,
518
+ "eval_bigbrother_wer": 0.4493675984854588,
519
+ "step": 5655
520
+ },
521
+ {
522
+ "epoch": 16.0,
523
+ "grad_norm": 6.88249397277832,
524
+ "learning_rate": 6.665684830633285e-05,
525
+ "loss": 1.2504,
526
+ "step": 6032
527
+ },
528
+ {
529
+ "epoch": 16.0,
530
+ "eval_rundkast_loss": 0.5179128646850586,
531
+ "eval_rundkast_runtime": 33.3288,
532
+ "eval_rundkast_samples_per_second": 40.356,
533
+ "eval_rundkast_steps_per_second": 1.29,
534
+ "eval_rundkast_wer": 0.24856824689786827,
535
+ "step": 6032
536
+ },
537
+ {
538
+ "epoch": 16.0,
539
+ "eval_nb_samtale_loss": 0.653479814529419,
540
+ "eval_nb_samtale_runtime": 37.2804,
541
+ "eval_nb_samtale_samples_per_second": 14.297,
542
+ "eval_nb_samtale_steps_per_second": 0.456,
543
+ "eval_nb_samtale_wer": 0.2644856770833333,
544
+ "step": 6032
545
+ },
546
+ {
547
+ "epoch": 16.0,
548
+ "eval_bigbrother_loss": 1.2211058139801025,
549
+ "eval_bigbrother_runtime": 42.0179,
550
+ "eval_bigbrother_samples_per_second": 32.605,
551
+ "eval_bigbrother_steps_per_second": 1.023,
552
+ "eval_bigbrother_wer": 0.43792797873197453,
553
+ "step": 6032
554
+ },
555
+ {
556
+ "epoch": 17.0,
557
+ "grad_norm": 3.6763479709625244,
558
+ "learning_rate": 6.388070692194403e-05,
559
+ "loss": 1.2295,
560
+ "step": 6409
561
+ },
562
+ {
563
+ "epoch": 17.0,
564
+ "eval_rundkast_loss": 0.4937501847743988,
565
+ "eval_rundkast_runtime": 37.7386,
566
+ "eval_rundkast_samples_per_second": 35.64,
567
+ "eval_rundkast_steps_per_second": 1.139,
568
+ "eval_rundkast_wer": 0.2561247216035635,
569
+ "step": 6409
570
+ },
571
+ {
572
+ "epoch": 17.0,
573
+ "eval_nb_samtale_loss": 0.6216253638267517,
574
+ "eval_nb_samtale_runtime": 38.691,
575
+ "eval_nb_samtale_samples_per_second": 13.776,
576
+ "eval_nb_samtale_steps_per_second": 0.439,
577
+ "eval_nb_samtale_wer": 0.2705078125,
578
+ "step": 6409
579
+ },
580
+ {
581
+ "epoch": 17.0,
582
+ "eval_bigbrother_loss": 1.188571572303772,
583
+ "eval_bigbrother_runtime": 41.7402,
584
+ "eval_bigbrother_samples_per_second": 32.822,
585
+ "eval_bigbrother_steps_per_second": 1.03,
586
+ "eval_bigbrother_wer": 0.4334165793925723,
587
+ "step": 6409
588
+ },
589
+ {
590
+ "epoch": 18.0,
591
+ "grad_norm": 5.536041259765625,
592
+ "learning_rate": 6.110456553755524e-05,
593
+ "loss": 1.2053,
594
+ "step": 6786
595
+ },
596
+ {
597
+ "epoch": 18.0,
598
+ "eval_rundkast_loss": 0.4938836097717285,
599
+ "eval_rundkast_runtime": 33.2744,
600
+ "eval_rundkast_samples_per_second": 40.421,
601
+ "eval_rundkast_steps_per_second": 1.292,
602
+ "eval_rundkast_wer": 0.24856824689786827,
603
+ "step": 6786
604
+ },
605
+ {
606
+ "epoch": 18.0,
607
+ "eval_nb_samtale_loss": 0.6238839030265808,
608
+ "eval_nb_samtale_runtime": 37.8912,
609
+ "eval_nb_samtale_samples_per_second": 14.067,
610
+ "eval_nb_samtale_steps_per_second": 0.449,
611
+ "eval_nb_samtale_wer": 0.263916015625,
612
+ "step": 6786
613
+ },
614
+ {
615
+ "epoch": 18.0,
616
+ "eval_bigbrother_loss": 1.1581498384475708,
617
+ "eval_bigbrother_runtime": 41.4842,
618
+ "eval_bigbrother_samples_per_second": 33.025,
619
+ "eval_bigbrother_steps_per_second": 1.037,
620
+ "eval_bigbrother_wer": 0.4314831225328285,
621
+ "step": 6786
622
+ },
623
+ {
624
+ "epoch": 19.0,
625
+ "grad_norm": 5.267818927764893,
626
+ "learning_rate": 5.832842415316643e-05,
627
+ "loss": 1.1815,
628
+ "step": 7163
629
+ },
630
+ {
631
+ "epoch": 19.0,
632
+ "eval_rundkast_loss": 0.4980849623680115,
633
+ "eval_rundkast_runtime": 33.6894,
634
+ "eval_rundkast_samples_per_second": 39.923,
635
+ "eval_rundkast_steps_per_second": 1.276,
636
+ "eval_rundkast_wer": 0.24451161310849506,
637
+ "step": 7163
638
+ },
639
+ {
640
+ "epoch": 19.0,
641
+ "eval_nb_samtale_loss": 0.6296201348304749,
642
+ "eval_nb_samtale_runtime": 37.5618,
643
+ "eval_nb_samtale_samples_per_second": 14.19,
644
+ "eval_nb_samtale_steps_per_second": 0.453,
645
+ "eval_nb_samtale_wer": 0.2608235677083333,
646
+ "step": 7163
647
+ },
648
+ {
649
+ "epoch": 19.0,
650
+ "eval_bigbrother_loss": 1.1892309188842773,
651
+ "eval_bigbrother_runtime": 41.4844,
652
+ "eval_bigbrother_samples_per_second": 33.024,
653
+ "eval_bigbrother_steps_per_second": 1.037,
654
+ "eval_bigbrother_wer": 0.4253605091436397,
655
+ "step": 7163
656
+ },
657
+ {
658
+ "epoch": 20.0,
659
+ "grad_norm": 41.6290283203125,
660
+ "learning_rate": 5.5559646539027985e-05,
661
+ "loss": 1.1703,
662
+ "step": 7540
663
+ },
664
+ {
665
+ "epoch": 20.0,
666
+ "eval_rundkast_loss": 0.5075950026512146,
667
+ "eval_rundkast_runtime": 33.2501,
668
+ "eval_rundkast_samples_per_second": 40.451,
669
+ "eval_rundkast_steps_per_second": 1.293,
670
+ "eval_rundkast_wer": 0.2478523703468024,
671
+ "step": 7540
672
+ },
673
+ {
674
+ "epoch": 20.0,
675
+ "eval_nb_samtale_loss": 0.6342408061027527,
676
+ "eval_nb_samtale_runtime": 37.4134,
677
+ "eval_nb_samtale_samples_per_second": 14.246,
678
+ "eval_nb_samtale_steps_per_second": 0.454,
679
+ "eval_nb_samtale_wer": 0.2586263020833333,
680
+ "step": 7540
681
+ },
682
+ {
683
+ "epoch": 20.0,
684
+ "eval_bigbrother_loss": 1.206842303276062,
685
+ "eval_bigbrother_runtime": 41.5395,
686
+ "eval_bigbrother_samples_per_second": 32.981,
687
+ "eval_bigbrother_steps_per_second": 1.035,
688
+ "eval_bigbrother_wer": 0.42519938773866106,
689
+ "step": 7540
690
+ },
691
+ {
692
+ "epoch": 21.0,
693
+ "grad_norm": 6.965384483337402,
694
+ "learning_rate": 5.278350515463918e-05,
695
+ "loss": 1.1446,
696
+ "step": 7917
697
+ },
698
+ {
699
+ "epoch": 21.0,
700
+ "eval_rundkast_loss": 0.5136203169822693,
701
+ "eval_rundkast_runtime": 33.5002,
702
+ "eval_rundkast_samples_per_second": 40.149,
703
+ "eval_rundkast_steps_per_second": 1.284,
704
+ "eval_rundkast_wer": 0.24793191218580973,
705
+ "step": 7917
706
+ },
707
+ {
708
+ "epoch": 21.0,
709
+ "eval_nb_samtale_loss": 0.6333425641059875,
710
+ "eval_nb_samtale_runtime": 37.5282,
711
+ "eval_nb_samtale_samples_per_second": 14.203,
712
+ "eval_nb_samtale_steps_per_second": 0.453,
713
+ "eval_nb_samtale_wer": 0.265625,
714
+ "step": 7917
715
+ },
716
+ {
717
+ "epoch": 21.0,
718
+ "eval_bigbrother_loss": 1.186505913734436,
719
+ "eval_bigbrother_runtime": 41.5636,
720
+ "eval_bigbrother_samples_per_second": 32.962,
721
+ "eval_bigbrother_steps_per_second": 1.035,
722
+ "eval_bigbrother_wer": 0.42487714492870376,
723
+ "step": 7917
724
+ },
725
+ {
726
+ "epoch": 22.0,
727
+ "grad_norm": 22.68753433227539,
728
+ "learning_rate": 5.000736377025037e-05,
729
+ "loss": 1.1384,
730
+ "step": 8294
731
+ },
732
+ {
733
+ "epoch": 22.0,
734
+ "eval_rundkast_loss": 0.5014224052429199,
735
+ "eval_rundkast_runtime": 33.6382,
736
+ "eval_rundkast_samples_per_second": 39.984,
737
+ "eval_rundkast_steps_per_second": 1.278,
738
+ "eval_rundkast_wer": 0.24880687241489025,
739
+ "step": 8294
740
+ },
741
+ {
742
+ "epoch": 22.0,
743
+ "eval_nb_samtale_loss": 0.6214331984519958,
744
+ "eval_nb_samtale_runtime": 37.41,
745
+ "eval_nb_samtale_samples_per_second": 14.248,
746
+ "eval_nb_samtale_steps_per_second": 0.454,
747
+ "eval_nb_samtale_wer": 0.2609049479166667,
748
+ "step": 8294
749
+ },
750
+ {
751
+ "epoch": 22.0,
752
+ "eval_bigbrother_loss": 1.1910523176193237,
753
+ "eval_bigbrother_runtime": 41.6527,
754
+ "eval_bigbrother_samples_per_second": 32.891,
755
+ "eval_bigbrother_steps_per_second": 1.032,
756
+ "eval_bigbrother_wer": 0.42189639893659875,
757
+ "step": 8294
758
+ },
759
+ {
760
+ "epoch": 23.0,
761
+ "grad_norm": 2.731438398361206,
762
+ "learning_rate": 4.723122238586156e-05,
763
+ "loss": 1.1324,
764
+ "step": 8671
765
+ },
766
+ {
767
+ "epoch": 23.0,
768
+ "eval_rundkast_loss": 0.476345032453537,
769
+ "eval_rundkast_runtime": 33.4012,
770
+ "eval_rundkast_samples_per_second": 40.268,
771
+ "eval_rundkast_steps_per_second": 1.287,
772
+ "eval_rundkast_wer": 0.24968183264397073,
773
+ "step": 8671
774
+ },
775
+ {
776
+ "epoch": 23.0,
777
+ "eval_nb_samtale_loss": 0.6042336821556091,
778
+ "eval_nb_samtale_runtime": 37.9587,
779
+ "eval_nb_samtale_samples_per_second": 14.042,
780
+ "eval_nb_samtale_steps_per_second": 0.448,
781
+ "eval_nb_samtale_wer": 0.2681477864583333,
782
+ "step": 8671
783
+ },
784
+ {
785
+ "epoch": 23.0,
786
+ "eval_bigbrother_loss": 1.1591678857803345,
787
+ "eval_bigbrother_runtime": 41.5785,
788
+ "eval_bigbrother_samples_per_second": 32.95,
789
+ "eval_bigbrother_steps_per_second": 1.034,
790
+ "eval_bigbrother_wer": 0.4180294852171111,
791
+ "step": 8671
792
+ },
793
+ {
794
+ "epoch": 24.0,
795
+ "grad_norm": 4.182296276092529,
796
+ "learning_rate": 4.4455081001472755e-05,
797
+ "loss": 1.0927,
798
+ "step": 9048
799
+ },
800
+ {
801
+ "epoch": 24.0,
802
+ "eval_rundkast_loss": 0.49040549993515015,
803
+ "eval_rundkast_runtime": 33.4922,
804
+ "eval_rundkast_samples_per_second": 40.159,
805
+ "eval_rundkast_steps_per_second": 1.284,
806
+ "eval_rundkast_wer": 0.2406936048361438,
807
+ "step": 9048
808
+ },
809
+ {
810
+ "epoch": 24.0,
811
+ "eval_nb_samtale_loss": 0.6173272132873535,
812
+ "eval_nb_samtale_runtime": 37.5762,
813
+ "eval_nb_samtale_samples_per_second": 14.184,
814
+ "eval_nb_samtale_steps_per_second": 0.452,
815
+ "eval_nb_samtale_wer": 0.26025390625,
816
+ "step": 9048
817
+ },
818
+ {
819
+ "epoch": 24.0,
820
+ "eval_bigbrother_loss": 1.1952488422393799,
821
+ "eval_bigbrother_runtime": 41.4629,
822
+ "eval_bigbrother_samples_per_second": 33.042,
823
+ "eval_bigbrother_steps_per_second": 1.037,
824
+ "eval_bigbrother_wer": 0.41786836381213244,
825
+ "step": 9048
826
+ },
827
+ {
828
+ "epoch": 25.0,
829
+ "grad_norm": 7.412995338439941,
830
+ "learning_rate": 4.167893961708395e-05,
831
+ "loss": 1.0897,
832
+ "step": 9425
833
+ },
834
+ {
835
+ "epoch": 25.0,
836
+ "eval_rundkast_loss": 0.5106588006019592,
837
+ "eval_rundkast_runtime": 33.3541,
838
+ "eval_rundkast_samples_per_second": 40.325,
839
+ "eval_rundkast_steps_per_second": 1.289,
840
+ "eval_rundkast_wer": 0.25206808781419027,
841
+ "step": 9425
842
+ },
843
+ {
844
+ "epoch": 25.0,
845
+ "eval_nb_samtale_loss": 0.6364408731460571,
846
+ "eval_nb_samtale_runtime": 37.3682,
847
+ "eval_nb_samtale_samples_per_second": 14.263,
848
+ "eval_nb_samtale_steps_per_second": 0.455,
849
+ "eval_nb_samtale_wer": 0.2630208333333333,
850
+ "step": 9425
851
+ },
852
+ {
853
+ "epoch": 25.0,
854
+ "eval_bigbrother_loss": 1.2089372873306274,
855
+ "eval_bigbrother_runtime": 41.0339,
856
+ "eval_bigbrother_samples_per_second": 33.387,
857
+ "eval_bigbrother_steps_per_second": 1.048,
858
+ "eval_bigbrother_wer": 0.4167405139772819,
859
+ "step": 9425
860
+ },
861
+ {
862
+ "epoch": 26.0,
863
+ "grad_norm": 20.474260330200195,
864
+ "learning_rate": 3.890279823269514e-05,
865
+ "loss": 1.0818,
866
+ "step": 9802
867
+ },
868
+ {
869
+ "epoch": 26.0,
870
+ "eval_rundkast_loss": 0.49753764271736145,
871
+ "eval_rundkast_runtime": 33.4596,
872
+ "eval_rundkast_samples_per_second": 40.198,
873
+ "eval_rundkast_steps_per_second": 1.285,
874
+ "eval_rundkast_wer": 0.24562519885459752,
875
+ "step": 9802
876
+ },
877
+ {
878
+ "epoch": 26.0,
879
+ "eval_nb_samtale_loss": 0.6342372298240662,
880
+ "eval_nb_samtale_runtime": 37.5602,
881
+ "eval_nb_samtale_samples_per_second": 14.191,
882
+ "eval_nb_samtale_steps_per_second": 0.453,
883
+ "eval_nb_samtale_wer": 0.260009765625,
884
+ "step": 9802
885
+ },
886
+ {
887
+ "epoch": 26.0,
888
+ "eval_bigbrother_loss": 1.175524115562439,
889
+ "eval_bigbrother_runtime": 41.4743,
890
+ "eval_bigbrother_samples_per_second": 33.033,
891
+ "eval_bigbrother_steps_per_second": 1.037,
892
+ "eval_bigbrother_wer": 0.4159349069523886,
893
+ "step": 9802
894
+ },
895
+ {
896
+ "epoch": 27.0,
897
+ "grad_norm": 3.3909595012664795,
898
+ "learning_rate": 3.6126656848306336e-05,
899
+ "loss": 1.0629,
900
+ "step": 10179
901
+ },
902
+ {
903
+ "epoch": 27.0,
904
+ "eval_rundkast_loss": 0.5271292924880981,
905
+ "eval_rundkast_runtime": 33.4554,
906
+ "eval_rundkast_samples_per_second": 40.203,
907
+ "eval_rundkast_steps_per_second": 1.285,
908
+ "eval_rundkast_wer": 0.24347756920139993,
909
+ "step": 10179
910
+ },
911
+ {
912
+ "epoch": 27.0,
913
+ "eval_nb_samtale_loss": 0.6458906531333923,
914
+ "eval_nb_samtale_runtime": 37.3547,
915
+ "eval_nb_samtale_samples_per_second": 14.269,
916
+ "eval_nb_samtale_steps_per_second": 0.455,
917
+ "eval_nb_samtale_wer": 0.2598470052083333,
918
+ "step": 10179
919
+ },
920
+ {
921
+ "epoch": 27.0,
922
+ "eval_bigbrother_loss": 1.1968939304351807,
923
+ "eval_bigbrother_runtime": 41.3751,
924
+ "eval_bigbrother_samples_per_second": 33.112,
925
+ "eval_bigbrother_steps_per_second": 1.039,
926
+ "eval_bigbrother_wer": 0.4142431322001128,
927
+ "step": 10179
928
+ },
929
+ {
930
+ "epoch": 28.0,
931
+ "grad_norm": 19.91695213317871,
932
+ "learning_rate": 3.335787923416789e-05,
933
+ "loss": 1.069,
934
+ "step": 10556
935
+ },
936
+ {
937
+ "epoch": 28.0,
938
+ "eval_rundkast_loss": 0.5136268138885498,
939
+ "eval_rundkast_runtime": 33.4249,
940
+ "eval_rundkast_samples_per_second": 40.24,
941
+ "eval_rundkast_steps_per_second": 1.286,
942
+ "eval_rundkast_wer": 0.2433980273623926,
943
+ "step": 10556
944
+ },
945
+ {
946
+ "epoch": 28.0,
947
+ "eval_nb_samtale_loss": 0.6378623247146606,
948
+ "eval_nb_samtale_runtime": 37.4155,
949
+ "eval_nb_samtale_samples_per_second": 14.245,
950
+ "eval_nb_samtale_steps_per_second": 0.454,
951
+ "eval_nb_samtale_wer": 0.25927734375,
952
+ "step": 10556
953
+ },
954
+ {
955
+ "epoch": 28.0,
956
+ "eval_bigbrother_loss": 1.1700557470321655,
957
+ "eval_bigbrother_runtime": 41.488,
958
+ "eval_bigbrother_samples_per_second": 33.022,
959
+ "eval_bigbrother_steps_per_second": 1.036,
960
+ "eval_bigbrother_wer": 0.41102070410053976,
961
+ "step": 10556
962
+ },
963
+ {
964
+ "epoch": 29.0,
965
+ "grad_norm": 4.6988959312438965,
966
+ "learning_rate": 3.058173784977909e-05,
967
+ "loss": 1.0561,
968
+ "step": 10933
969
+ },
970
+ {
971
+ "epoch": 29.0,
972
+ "eval_rundkast_loss": 0.5006275773048401,
973
+ "eval_rundkast_runtime": 33.4079,
974
+ "eval_rundkast_samples_per_second": 40.26,
975
+ "eval_rundkast_steps_per_second": 1.287,
976
+ "eval_rundkast_wer": 0.24387527839643652,
977
+ "step": 10933
978
+ },
979
+ {
980
+ "epoch": 29.0,
981
+ "eval_nb_samtale_loss": 0.6258318424224854,
982
+ "eval_nb_samtale_runtime": 37.2465,
983
+ "eval_nb_samtale_samples_per_second": 14.31,
984
+ "eval_nb_samtale_steps_per_second": 0.456,
985
+ "eval_nb_samtale_wer": 0.258056640625,
986
+ "step": 10933
987
+ },
988
+ {
989
+ "epoch": 29.0,
990
+ "eval_bigbrother_loss": 1.1609516143798828,
991
+ "eval_bigbrother_runtime": 41.9041,
992
+ "eval_bigbrother_samples_per_second": 32.694,
993
+ "eval_bigbrother_steps_per_second": 1.026,
994
+ "eval_bigbrother_wer": 0.4105373398856038,
995
+ "step": 10933
996
+ },
997
+ {
998
+ "epoch": 30.0,
999
+ "grad_norm": 6.915459156036377,
1000
+ "learning_rate": 2.7805596465390282e-05,
1001
+ "loss": 1.046,
1002
+ "step": 11310
1003
+ },
1004
+ {
1005
+ "epoch": 30.0,
1006
+ "eval_rundkast_loss": 0.5193740725517273,
1007
+ "eval_rundkast_runtime": 33.0243,
1008
+ "eval_rundkast_samples_per_second": 40.728,
1009
+ "eval_rundkast_steps_per_second": 1.302,
1010
+ "eval_rundkast_wer": 0.24530703149856825,
1011
+ "step": 11310
1012
+ },
1013
+ {
1014
+ "epoch": 30.0,
1015
+ "eval_nb_samtale_loss": 0.6532759666442871,
1016
+ "eval_nb_samtale_runtime": 37.2211,
1017
+ "eval_nb_samtale_samples_per_second": 14.32,
1018
+ "eval_nb_samtale_steps_per_second": 0.457,
1019
+ "eval_nb_samtale_wer": 0.260986328125,
1020
+ "step": 11310
1021
+ },
1022
+ {
1023
+ "epoch": 30.0,
1024
+ "eval_bigbrother_loss": 1.2057961225509644,
1025
+ "eval_bigbrother_runtime": 41.7437,
1026
+ "eval_bigbrother_samples_per_second": 32.819,
1027
+ "eval_bigbrother_steps_per_second": 1.03,
1028
+ "eval_bigbrother_wer": 0.4085233223233707,
1029
+ "step": 11310
1030
+ },
1031
+ {
1032
+ "epoch": 31.0,
1033
+ "grad_norm": 6.216676712036133,
1034
+ "learning_rate": 2.5029455081001475e-05,
1035
+ "loss": 1.0244,
1036
+ "step": 11687
1037
+ },
1038
+ {
1039
+ "epoch": 31.0,
1040
+ "eval_rundkast_loss": 0.5112914443016052,
1041
+ "eval_rundkast_runtime": 33.3775,
1042
+ "eval_rundkast_samples_per_second": 40.297,
1043
+ "eval_rundkast_steps_per_second": 1.288,
1044
+ "eval_rundkast_wer": 0.24347756920139993,
1045
+ "step": 11687
1046
+ },
1047
+ {
1048
+ "epoch": 31.0,
1049
+ "eval_nb_samtale_loss": 0.637014627456665,
1050
+ "eval_nb_samtale_runtime": 37.0028,
1051
+ "eval_nb_samtale_samples_per_second": 14.404,
1052
+ "eval_nb_samtale_steps_per_second": 0.459,
1053
+ "eval_nb_samtale_wer": 0.2584635416666667,
1054
+ "step": 11687
1055
+ },
1056
+ {
1057
+ "epoch": 31.0,
1058
+ "eval_bigbrother_loss": 1.196700096130371,
1059
+ "eval_bigbrother_runtime": 41.5693,
1060
+ "eval_bigbrother_samples_per_second": 32.957,
1061
+ "eval_bigbrother_steps_per_second": 1.034,
1062
+ "eval_bigbrother_wer": 0.40699266897607345,
1063
+ "step": 11687
1064
+ },
1065
+ {
1066
+ "epoch": 32.0,
1067
+ "grad_norm": 5.768988609313965,
1068
+ "learning_rate": 2.2253313696612664e-05,
1069
+ "loss": 1.0211,
1070
+ "step": 12064
1071
+ },
1072
+ {
1073
+ "epoch": 32.0,
1074
+ "eval_rundkast_loss": 0.5140534043312073,
1075
+ "eval_rundkast_runtime": 33.4214,
1076
+ "eval_rundkast_samples_per_second": 40.244,
1077
+ "eval_rundkast_steps_per_second": 1.287,
1078
+ "eval_rundkast_wer": 0.24307986000636334,
1079
+ "step": 12064
1080
+ },
1081
+ {
1082
+ "epoch": 32.0,
1083
+ "eval_nb_samtale_loss": 0.6492373943328857,
1084
+ "eval_nb_samtale_runtime": 37.5056,
1085
+ "eval_nb_samtale_samples_per_second": 14.211,
1086
+ "eval_nb_samtale_steps_per_second": 0.453,
1087
+ "eval_nb_samtale_wer": 0.2571614583333333,
1088
+ "step": 12064
1089
+ },
1090
+ {
1091
+ "epoch": 32.0,
1092
+ "eval_bigbrother_loss": 1.1793997287750244,
1093
+ "eval_bigbrother_runtime": 41.1907,
1094
+ "eval_bigbrother_samples_per_second": 33.26,
1095
+ "eval_bigbrother_steps_per_second": 1.044,
1096
+ "eval_bigbrother_wer": 0.40812051881092404,
1097
+ "step": 12064
1098
+ },
1099
+ {
1100
+ "epoch": 33.0,
1101
+ "grad_norm": 8.210163116455078,
1102
+ "learning_rate": 1.947717231222386e-05,
1103
+ "loss": 1.0221,
1104
+ "step": 12441
1105
+ },
1106
+ {
1107
+ "epoch": 33.0,
1108
+ "eval_rundkast_loss": 0.5195760726928711,
1109
+ "eval_rundkast_runtime": 33.4259,
1110
+ "eval_rundkast_samples_per_second": 40.238,
1111
+ "eval_rundkast_steps_per_second": 1.286,
1112
+ "eval_rundkast_wer": 0.24188673242125358,
1113
+ "step": 12441
1114
+ },
1115
+ {
1116
+ "epoch": 33.0,
1117
+ "eval_nb_samtale_loss": 0.6468714475631714,
1118
+ "eval_nb_samtale_runtime": 37.1982,
1119
+ "eval_nb_samtale_samples_per_second": 14.329,
1120
+ "eval_nb_samtale_steps_per_second": 0.457,
1121
+ "eval_nb_samtale_wer": 0.257568359375,
1122
+ "step": 12441
1123
+ },
1124
+ {
1125
+ "epoch": 33.0,
1126
+ "eval_bigbrother_loss": 1.1880689859390259,
1127
+ "eval_bigbrother_runtime": 41.3876,
1128
+ "eval_bigbrother_samples_per_second": 33.102,
1129
+ "eval_bigbrother_steps_per_second": 1.039,
1130
+ "eval_bigbrother_wer": 0.40747603319100945,
1131
+ "step": 12441
1132
+ },
1133
+ {
1134
+ "epoch": 34.0,
1135
+ "grad_norm": 4.438199043273926,
1136
+ "learning_rate": 1.670839469808542e-05,
1137
+ "loss": 1.0095,
1138
+ "step": 12818
1139
+ },
1140
+ {
1141
+ "epoch": 34.0,
1142
+ "eval_rundkast_loss": 0.5272213816642761,
1143
+ "eval_rundkast_runtime": 33.3742,
1144
+ "eval_rundkast_samples_per_second": 40.301,
1145
+ "eval_rundkast_steps_per_second": 1.288,
1146
+ "eval_rundkast_wer": 0.24212535793827553,
1147
+ "step": 12818
1148
+ },
1149
+ {
1150
+ "epoch": 34.0,
1151
+ "eval_nb_samtale_loss": 0.6708551645278931,
1152
+ "eval_nb_samtale_runtime": 37.2593,
1153
+ "eval_nb_samtale_samples_per_second": 14.305,
1154
+ "eval_nb_samtale_steps_per_second": 0.456,
1155
+ "eval_nb_samtale_wer": 0.2578125,
1156
+ "step": 12818
1157
+ },
1158
+ {
1159
+ "epoch": 34.0,
1160
+ "eval_bigbrother_loss": 1.2168066501617432,
1161
+ "eval_bigbrother_runtime": 41.3249,
1162
+ "eval_bigbrother_samples_per_second": 33.152,
1163
+ "eval_bigbrother_steps_per_second": 1.041,
1164
+ "eval_bigbrother_wer": 0.4064287440586482,
1165
+ "step": 12818
1166
+ },
1167
+ {
1168
+ "epoch": 35.0,
1169
+ "grad_norm": 18.065574645996094,
1170
+ "learning_rate": 1.3932253313696614e-05,
1171
+ "loss": 1.0011,
1172
+ "step": 13195
1173
+ },
1174
+ {
1175
+ "epoch": 35.0,
1176
+ "eval_rundkast_loss": 0.5081976652145386,
1177
+ "eval_rundkast_runtime": 33.3601,
1178
+ "eval_rundkast_samples_per_second": 40.318,
1179
+ "eval_rundkast_steps_per_second": 1.289,
1180
+ "eval_rundkast_wer": 0.2410117721921731,
1181
+ "step": 13195
1182
+ },
1183
+ {
1184
+ "epoch": 35.0,
1185
+ "eval_nb_samtale_loss": 0.6458988189697266,
1186
+ "eval_nb_samtale_runtime": 37.6087,
1187
+ "eval_nb_samtale_samples_per_second": 14.172,
1188
+ "eval_nb_samtale_steps_per_second": 0.452,
1189
+ "eval_nb_samtale_wer": 0.257080078125,
1190
+ "step": 13195
1191
+ },
1192
+ {
1193
+ "epoch": 35.0,
1194
+ "eval_bigbrother_loss": 1.1833205223083496,
1195
+ "eval_bigbrother_runtime": 41.2758,
1196
+ "eval_bigbrother_samples_per_second": 33.191,
1197
+ "eval_bigbrother_steps_per_second": 1.042,
1198
+ "eval_bigbrother_wer": 0.40667042616611615,
1199
+ "step": 13195
1200
+ },
1201
+ {
1202
+ "epoch": 36.0,
1203
+ "grad_norm": 14.747867584228516,
1204
+ "learning_rate": 1.1156111929307807e-05,
1205
+ "loss": 0.9849,
1206
+ "step": 13572
1207
+ },
1208
+ {
1209
+ "epoch": 36.0,
1210
+ "eval_rundkast_loss": 0.5170104503631592,
1211
+ "eval_rundkast_runtime": 33.5036,
1212
+ "eval_rundkast_samples_per_second": 40.145,
1213
+ "eval_rundkast_steps_per_second": 1.283,
1214
+ "eval_rundkast_wer": 0.24140948138720966,
1215
+ "step": 13572
1216
+ },
1217
+ {
1218
+ "epoch": 36.0,
1219
+ "eval_nb_samtale_loss": 0.6568956971168518,
1220
+ "eval_nb_samtale_runtime": 37.0353,
1221
+ "eval_nb_samtale_samples_per_second": 14.392,
1222
+ "eval_nb_samtale_steps_per_second": 0.459,
1223
+ "eval_nb_samtale_wer": 0.2569173177083333,
1224
+ "step": 13572
1225
+ },
1226
+ {
1227
+ "epoch": 36.0,
1228
+ "eval_bigbrother_loss": 1.2056487798690796,
1229
+ "eval_bigbrother_runtime": 41.6456,
1230
+ "eval_bigbrother_samples_per_second": 32.897,
1231
+ "eval_bigbrother_steps_per_second": 1.033,
1232
+ "eval_bigbrother_wer": 0.404656408603883,
1233
+ "step": 13572
1234
+ }
1235
+ ],
1236
+ "logging_steps": 500,
1237
+ "max_steps": 15080,
1238
+ "num_input_tokens_seen": 0,
1239
+ "num_train_epochs": 40,
1240
+ "save_steps": 500,
1241
+ "total_flos": 7.449714460951059e+19,
1242
+ "train_batch_size": 48,
1243
+ "trial_name": null,
1244
+ "trial_params": null
1245
+ }
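trainer_state.json logs one training entry and three evaluation entries (rundkast, nb_samtale, bigbrother) per epoch, and its best_metric (0.4047) is the bigbrother WER reached at step 13572. A minimal sketch, not part of the commit, for pulling the per-epoch WER curves out of log_history:

```python
# Sketch: extract per-epoch eval WER from a Trainer state file.
import json
from collections import defaultdict

with open("checkpoint-13572/trainer_state.json", encoding="utf-8") as f:
    state = json.load(f)

wer = defaultdict(dict)  # {epoch: {dataset: wer}}
for entry in state["log_history"]:
    for key, value in entry.items():
        if key.startswith("eval_") and key.endswith("_wer"):
            dataset = key[len("eval_"):-len("_wer")]
            wer[entry["epoch"]][dataset] = value

for epoch in sorted(wer):
    print(epoch, wer[epoch])
```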
checkpoint-13572/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4a7afd10c5b5a15d9300aae5934cd968ceb099ca1ed1f39f2f47a88cce68902c
+ size 4984
checkpoint-15080/config.json ADDED
@@ -0,0 +1,116 @@
+ {
+ "_name_or_path": "NbAiLab/nb-wav2vec2-300m-bokmaal",
+ "activation_dropout": 0.055,
+ "adapter_attn_dim": null,
+ "adapter_kernel_size": 3,
+ "adapter_stride": 2,
+ "add_adapter": false,
+ "apply_spec_augment": true,
+ "architectures": [
+ "Wav2Vec2ForCTC"
+ ],
+ "attention_dropout": 0.094,
+ "bos_token_id": 1,
+ "classifier_proj_size": 256,
+ "codevector_dim": 768,
+ "contrastive_logits_temperature": 0.1,
+ "conv_bias": true,
+ "conv_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512
+ ],
+ "conv_kernel": [
+ 10,
+ 3,
+ 3,
+ 3,
+ 3,
+ 2,
+ 2
+ ],
+ "conv_stride": [
+ 5,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2
+ ],
+ "ctc_loss_reduction": "mean",
+ "ctc_zero_infinity": true,
+ "diversity_loss_weight": 0.1,
+ "do_stable_layer_norm": true,
+ "eos_token_id": 2,
+ "feat_extract_activation": "gelu",
+ "feat_extract_dropout": 0.0,
+ "feat_extract_norm": "layer",
+ "feat_proj_dropout": 0.04,
+ "feat_quantizer_dropout": 0.0,
+ "final_dropout": 0.0,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.047,
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 4096,
+ "layer_norm_eps": 1e-05,
+ "layerdrop": 0.041,
+ "mask_channel_length": 10,
+ "mask_channel_min_space": 1,
+ "mask_channel_other": 0.0,
+ "mask_channel_prob": 0.0,
+ "mask_channel_selection": "static",
+ "mask_feature_length": 64,
+ "mask_feature_min_masks": 0,
+ "mask_feature_prob": 0.25,
+ "mask_time_length": 10,
+ "mask_time_min_masks": 2,
+ "mask_time_min_space": 1,
+ "mask_time_other": 0.0,
+ "mask_time_prob": 0.082,
+ "mask_time_selection": "static",
+ "model_type": "wav2vec2",
+ "num_adapter_layers": 3,
+ "num_attention_heads": 16,
+ "num_codevector_groups": 2,
+ "num_codevectors_per_group": 320,
+ "num_conv_pos_embedding_groups": 16,
+ "num_conv_pos_embeddings": 128,
+ "num_feat_extract_layers": 7,
+ "num_hidden_layers": 24,
+ "num_negatives": 100,
+ "output_hidden_size": 1024,
+ "pad_token_id": 31,
+ "proj_codevector_dim": 768,
+ "tdnn_dilation": [
+ 1,
+ 2,
+ 3,
+ 1,
+ 1
+ ],
+ "tdnn_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 1500
+ ],
+ "tdnn_kernel": [
+ 5,
+ 3,
+ 3,
+ 1,
+ 1
+ ],
+ "torch_dtype": "float32",
+ "transformers_version": "4.38.1",
+ "use_weighted_layer_sum": false,
+ "vocab_size": 34,
+ "xvector_output_dim": 512
+ }
checkpoint-15080/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:39e689cc12ab482feb0543c127b7b511e2bad060fad7ae443c65237b8aa89966
+ size 1261946880
checkpoint-15080/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c4d700b62e172b807b13a613dfc450159dec725c98fec11ad5c13fc7a2d88e8a
+ size 2490438582
checkpoint-15080/preprocessor_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "do_normalize": true,
+ "feature_extractor_type": "Wav2Vec2FeatureExtractor",
+ "feature_size": 1,
+ "padding_side": "right",
+ "padding_value": 0,
+ "processor_class": "Wav2Vec2ProcessorWithLM",
+ "return_attention_mask": true,
+ "sampling_rate": 16000
+ }
checkpoint-15080/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:de1e1fdc839d0eaf996b93c34a19f41220628252c50203f110f97e10918b59d9
+ size 14244
checkpoint-15080/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fe19c5deafc968b9be7b3e8b84cb739747a2b63c5e20665bc18b1ca7af211e33
+ size 1064
checkpoint-15080/trainer_state.json ADDED
@@ -0,0 +1,1381 @@
1
+ {
2
+ "best_metric": 0.404656408603883,
3
+ "best_model_checkpoint": "/cluster/home/torstefl/Master/saved_model/W2V/single/BB/30.05/checkpoint-13572",
4
+ "epoch": 40.0,
5
+ "eval_steps": 500,
6
+ "global_step": 15080,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 1.0,
13
+ "grad_norm": 6.709790229797363,
14
+ "learning_rate": 2.5e-05,
15
+ "loss": 2.4835,
16
+ "step": 377
17
+ },
18
+ {
19
+ "epoch": 1.0,
20
+ "eval_rundkast_loss": 0.419185996055603,
21
+ "eval_rundkast_runtime": 33.4408,
22
+ "eval_rundkast_samples_per_second": 40.22,
23
+ "eval_rundkast_steps_per_second": 1.286,
24
+ "eval_rundkast_wer": 0.24005727012408526,
25
+ "step": 377
26
+ },
27
+ {
28
+ "epoch": 1.0,
29
+ "eval_nb_samtale_loss": 0.5932812094688416,
30
+ "eval_nb_samtale_runtime": 38.8625,
31
+ "eval_nb_samtale_samples_per_second": 13.715,
32
+ "eval_nb_samtale_steps_per_second": 0.437,
33
+ "eval_nb_samtale_wer": 0.2626953125,
34
+ "step": 377
35
+ },
36
+ {
37
+ "epoch": 1.0,
38
+ "eval_bigbrother_loss": 1.838633418083191,
39
+ "eval_bigbrother_runtime": 41.4238,
40
+ "eval_bigbrother_samples_per_second": 33.073,
41
+ "eval_bigbrother_steps_per_second": 1.038,
42
+ "eval_bigbrother_wer": 0.5936518166438411,
43
+ "step": 377
44
+ },
45
+ {
46
+ "epoch": 2.0,
47
+ "grad_norm": 8.728084564208984,
48
+ "learning_rate": 5.013333333333333e-05,
49
+ "loss": 2.0585,
50
+ "step": 754
51
+ },
52
+ {
53
+ "epoch": 2.0,
54
+ "eval_rundkast_loss": 0.40199896693229675,
55
+ "eval_rundkast_runtime": 33.3039,
56
+ "eval_rundkast_samples_per_second": 40.386,
57
+ "eval_rundkast_steps_per_second": 1.291,
58
+ "eval_rundkast_wer": 0.2357620108176901,
59
+ "step": 754
60
+ },
61
+ {
62
+ "epoch": 2.0,
63
+ "eval_nb_samtale_loss": 0.5779798030853271,
64
+ "eval_nb_samtale_runtime": 37.921,
65
+ "eval_nb_samtale_samples_per_second": 14.056,
66
+ "eval_nb_samtale_steps_per_second": 0.448,
67
+ "eval_nb_samtale_wer": 0.253173828125,
68
+ "step": 754
69
+ },
70
+ {
71
+ "epoch": 2.0,
72
+ "eval_bigbrother_loss": 1.5922973155975342,
73
+ "eval_bigbrother_runtime": 41.1151,
74
+ "eval_bigbrother_samples_per_second": 33.321,
75
+ "eval_bigbrother_steps_per_second": 1.046,
76
+ "eval_bigbrother_wer": 0.5574800612261339,
77
+ "step": 754
78
+ },
79
+ {
80
+ "epoch": 3.0,
81
+ "grad_norm": 6.347009658813477,
82
+ "learning_rate": 7.526666666666668e-05,
83
+ "loss": 1.9142,
84
+ "step": 1131
85
+ },
86
+ {
87
+ "epoch": 3.0,
88
+ "eval_rundkast_loss": 0.3938983082771301,
89
+ "eval_rundkast_runtime": 33.5062,
90
+ "eval_rundkast_samples_per_second": 40.142,
91
+ "eval_rundkast_steps_per_second": 1.283,
92
+ "eval_rundkast_wer": 0.24029589564110723,
93
+ "step": 1131
94
+ },
95
+ {
96
+ "epoch": 3.0,
97
+ "eval_nb_samtale_loss": 0.5637267827987671,
98
+ "eval_nb_samtale_runtime": 38.0506,
99
+ "eval_nb_samtale_samples_per_second": 14.008,
100
+ "eval_nb_samtale_steps_per_second": 0.447,
101
+ "eval_nb_samtale_wer": 0.2652180989583333,
102
+ "step": 1131
103
+ },
104
+ {
105
+ "epoch": 3.0,
106
+ "eval_bigbrother_loss": 1.4027199745178223,
107
+ "eval_bigbrother_runtime": 41.4816,
108
+ "eval_bigbrother_samples_per_second": 33.027,
109
+ "eval_bigbrother_steps_per_second": 1.037,
110
+ "eval_bigbrother_wer": 0.5358897929589946,
111
+ "step": 1131
112
+ },
113
+ {
114
+ "epoch": 4.0,
115
+ "grad_norm": 13.580578804016113,
116
+ "learning_rate": 9.99558173784978e-05,
117
+ "loss": 1.816,
118
+ "step": 1508
119
+ },
120
+ {
121
+ "epoch": 4.0,
122
+ "eval_rundkast_loss": 0.3921523690223694,
123
+ "eval_rundkast_runtime": 33.1392,
124
+ "eval_rundkast_samples_per_second": 40.586,
125
+ "eval_rundkast_steps_per_second": 1.298,
126
+ "eval_rundkast_wer": 0.24315940184537066,
127
+ "step": 1508
128
+ },
129
+ {
130
+ "epoch": 4.0,
131
+ "eval_nb_samtale_loss": 0.5375664830207825,
132
+ "eval_nb_samtale_runtime": 37.7821,
133
+ "eval_nb_samtale_samples_per_second": 14.107,
134
+ "eval_nb_samtale_steps_per_second": 0.45,
135
+ "eval_nb_samtale_wer": 0.262939453125,
136
+ "step": 1508
137
+ },
138
+ {
139
+ "epoch": 4.0,
140
+ "eval_bigbrother_loss": 1.2783823013305664,
141
+ "eval_bigbrother_runtime": 41.1302,
142
+ "eval_bigbrother_samples_per_second": 33.309,
143
+ "eval_bigbrother_steps_per_second": 1.045,
144
+ "eval_bigbrother_wer": 0.5196165310561508,
145
+ "step": 1508
146
+ },
147
+ {
148
+ "epoch": 5.0,
149
+ "grad_norm": 5.716007709503174,
150
+ "learning_rate": 9.717967599410898e-05,
151
+ "loss": 1.7424,
152
+ "step": 1885
153
+ },
154
+ {
155
+ "epoch": 5.0,
156
+ "eval_rundkast_loss": 0.40847668051719666,
157
+ "eval_rundkast_runtime": 33.007,
158
+ "eval_rundkast_samples_per_second": 40.749,
159
+ "eval_rundkast_steps_per_second": 1.303,
160
+ "eval_rundkast_wer": 0.23870505886096086,
161
+ "step": 1885
162
+ },
163
+ {
164
+ "epoch": 5.0,
165
+ "eval_nb_samtale_loss": 0.5756209492683411,
166
+ "eval_nb_samtale_runtime": 37.9326,
167
+ "eval_nb_samtale_samples_per_second": 14.051,
168
+ "eval_nb_samtale_steps_per_second": 0.448,
169
+ "eval_nb_samtale_wer": 0.2607421875,
170
+ "step": 1885
171
+ },
172
+ {
173
+ "epoch": 5.0,
174
+ "eval_bigbrother_loss": 1.3229293823242188,
175
+ "eval_bigbrother_runtime": 40.9231,
176
+ "eval_bigbrother_samples_per_second": 33.477,
177
+ "eval_bigbrother_steps_per_second": 1.051,
178
+ "eval_bigbrother_wer": 0.5068879400628373,
179
+ "step": 1885
180
+ },
181
+ {
182
+ "epoch": 6.0,
183
+ "grad_norm": 4.037384510040283,
184
+ "learning_rate": 9.440353460972018e-05,
185
+ "loss": 1.6619,
186
+ "step": 2262
187
+ },
188
+ {
189
+ "epoch": 6.0,
190
+ "eval_rundkast_loss": 0.4441112279891968,
191
+ "eval_rundkast_runtime": 33.7921,
192
+ "eval_rundkast_samples_per_second": 39.802,
193
+ "eval_rundkast_steps_per_second": 1.272,
194
+ "eval_rundkast_wer": 0.24315940184537066,
195
+ "step": 2262
196
+ },
197
+ {
198
+ "epoch": 6.0,
199
+ "eval_nb_samtale_loss": 0.6132481694221497,
200
+ "eval_nb_samtale_runtime": 38.4613,
201
+ "eval_nb_samtale_samples_per_second": 13.858,
202
+ "eval_nb_samtale_steps_per_second": 0.442,
203
+ "eval_nb_samtale_wer": 0.2744140625,
204
+ "step": 2262
205
+ },
206
+ {
207
+ "epoch": 6.0,
208
+ "eval_bigbrother_loss": 1.3384240865707397,
209
+ "eval_bigbrother_runtime": 41.3107,
210
+ "eval_bigbrother_samples_per_second": 33.163,
211
+ "eval_bigbrother_steps_per_second": 1.041,
212
+ "eval_bigbrother_wer": 0.5068879400628373,
213
+ "step": 2262
214
+ },
215
+ {
216
+ "epoch": 7.0,
217
+ "grad_norm": 8.110424995422363,
218
+ "learning_rate": 9.163475699558174e-05,
219
+ "loss": 1.6139,
220
+ "step": 2639
221
+ },
222
+ {
223
+ "epoch": 7.0,
224
+ "eval_rundkast_loss": 0.45466309785842896,
225
+ "eval_rundkast_runtime": 33.255,
226
+ "eval_rundkast_samples_per_second": 40.445,
227
+ "eval_rundkast_steps_per_second": 1.293,
228
+ "eval_rundkast_wer": 0.24713649379573654,
229
+ "step": 2639
230
+ },
231
+ {
232
+ "epoch": 7.0,
233
+ "eval_nb_samtale_loss": 0.5841706991195679,
234
+ "eval_nb_samtale_runtime": 38.3016,
235
+ "eval_nb_samtale_samples_per_second": 13.916,
236
+ "eval_nb_samtale_steps_per_second": 0.444,
237
+ "eval_nb_samtale_wer": 0.2757975260416667,
238
+ "step": 2639
239
+ },
240
+ {
241
+ "epoch": 7.0,
242
+ "eval_bigbrother_loss": 1.338714838027954,
243
+ "eval_bigbrother_runtime": 41.6281,
244
+ "eval_bigbrother_samples_per_second": 32.91,
245
+ "eval_bigbrother_steps_per_second": 1.033,
246
+ "eval_bigbrother_wer": 0.49625392733424634,
247
+ "step": 2639
248
+ },
249
+ {
250
+ "epoch": 8.0,
251
+ "grad_norm": 19.2949275970459,
252
+ "learning_rate": 8.885861561119293e-05,
253
+ "loss": 1.5567,
254
+ "step": 3016
255
+ },
256
+ {
257
+ "epoch": 8.0,
258
+ "eval_rundkast_loss": 0.45816686749458313,
259
+ "eval_rundkast_runtime": 33.2799,
260
+ "eval_rundkast_samples_per_second": 40.415,
261
+ "eval_rundkast_steps_per_second": 1.292,
262
+ "eval_rundkast_wer": 0.25095450206808784,
263
+ "step": 3016
264
+ },
265
+ {
266
+ "epoch": 8.0,
267
+ "eval_nb_samtale_loss": 0.5909866690635681,
268
+ "eval_nb_samtale_runtime": 37.7066,
269
+ "eval_nb_samtale_samples_per_second": 14.135,
270
+ "eval_nb_samtale_steps_per_second": 0.451,
271
+ "eval_nb_samtale_wer": 0.2709147135416667,
272
+ "step": 3016
273
+ },
274
+ {
275
+ "epoch": 8.0,
276
+ "eval_bigbrother_loss": 1.2419942617416382,
277
+ "eval_bigbrother_runtime": 41.2436,
278
+ "eval_bigbrother_samples_per_second": 33.217,
279
+ "eval_bigbrother_steps_per_second": 1.043,
280
+ "eval_bigbrother_wer": 0.49238701361475873,
281
+ "step": 3016
282
+ },
283
+ {
284
+ "epoch": 9.0,
285
+ "grad_norm": 4.614249229431152,
286
+ "learning_rate": 8.608247422680413e-05,
287
+ "loss": 1.4942,
288
+ "step": 3393
289
+ },
290
+ {
291
+ "epoch": 9.0,
292
+ "eval_rundkast_loss": 0.4503733217716217,
293
+ "eval_rundkast_runtime": 33.2408,
294
+ "eval_rundkast_samples_per_second": 40.462,
295
+ "eval_rundkast_steps_per_second": 1.294,
296
+ "eval_rundkast_wer": 0.2419662742602609,
297
+ "step": 3393
298
+ },
299
+ {
300
+ "epoch": 9.0,
301
+ "eval_nb_samtale_loss": 0.5738528370857239,
302
+ "eval_nb_samtale_runtime": 37.6697,
303
+ "eval_nb_samtale_samples_per_second": 14.149,
304
+ "eval_nb_samtale_steps_per_second": 0.451,
305
+ "eval_nb_samtale_wer": 0.2655436197916667,
306
+ "step": 3393
307
+ },
308
+ {
309
+ "epoch": 9.0,
310
+ "eval_bigbrother_loss": 1.207922101020813,
311
+ "eval_bigbrother_runtime": 41.3161,
312
+ "eval_bigbrother_samples_per_second": 33.159,
313
+ "eval_bigbrother_steps_per_second": 1.041,
314
+ "eval_bigbrother_wer": 0.4722468379924273,
315
+ "step": 3393
316
+ },
317
+ {
318
+ "epoch": 10.0,
319
+ "grad_norm": 10.218366622924805,
320
+ "learning_rate": 8.330633284241532e-05,
321
+ "loss": 1.4412,
322
+ "step": 3770
323
+ },
324
+ {
325
+ "epoch": 10.0,
326
+ "eval_rundkast_loss": 0.4589375853538513,
327
+ "eval_rundkast_runtime": 33.4846,
328
+ "eval_rundkast_samples_per_second": 40.168,
329
+ "eval_rundkast_steps_per_second": 1.284,
330
+ "eval_rundkast_wer": 0.2531816735602927,
331
+ "step": 3770
332
+ },
333
+ {
334
+ "epoch": 10.0,
335
+ "eval_nb_samtale_loss": 0.5715627670288086,
336
+ "eval_nb_samtale_runtime": 37.8642,
337
+ "eval_nb_samtale_samples_per_second": 14.077,
338
+ "eval_nb_samtale_steps_per_second": 0.449,
339
+ "eval_nb_samtale_wer": 0.2689615885416667,
340
+ "step": 3770
341
+ },
342
+ {
343
+ "epoch": 10.0,
344
+ "eval_bigbrother_loss": 1.1500192880630493,
345
+ "eval_bigbrother_runtime": 41.3376,
346
+ "eval_bigbrother_samples_per_second": 33.142,
347
+ "eval_bigbrother_steps_per_second": 1.04,
348
+ "eval_bigbrother_wer": 0.46588254249577055,
349
+ "step": 3770
350
+ },
351
+ {
352
+ "epoch": 11.0,
353
+ "grad_norm": 3.9760067462921143,
354
+ "learning_rate": 8.053019145802652e-05,
355
+ "loss": 1.4267,
356
+ "step": 4147
357
+ },
358
+ {
359
+ "epoch": 11.0,
360
+ "eval_rundkast_loss": 0.4483039081096649,
361
+ "eval_rundkast_runtime": 33.8508,
362
+ "eval_rundkast_samples_per_second": 39.733,
363
+ "eval_rundkast_steps_per_second": 1.27,
364
+ "eval_rundkast_wer": 0.25015908367801465,
365
+ "step": 4147
366
+ },
367
+ {
368
+ "epoch": 11.0,
369
+ "eval_nb_samtale_loss": 0.5839167237281799,
370
+ "eval_nb_samtale_runtime": 38.2514,
371
+ "eval_nb_samtale_samples_per_second": 13.934,
372
+ "eval_nb_samtale_steps_per_second": 0.444,
373
+ "eval_nb_samtale_wer": 0.2657063802083333,
374
+ "step": 4147
375
+ },
376
+ {
377
+ "epoch": 11.0,
378
+ "eval_bigbrother_loss": 1.246797800064087,
379
+ "eval_bigbrother_runtime": 41.8091,
380
+ "eval_bigbrother_samples_per_second": 32.768,
381
+ "eval_bigbrother_steps_per_second": 1.028,
382
+ "eval_bigbrother_wer": 0.45669862241198744,
383
+ "step": 4147
384
+ },
385
+ {
386
+ "epoch": 12.0,
387
+ "grad_norm": 4.7413763999938965,
388
+ "learning_rate": 7.77540500736377e-05,
389
+ "loss": 1.3792,
390
+ "step": 4524
391
+ },
392
+ {
393
+ "epoch": 12.0,
394
+ "eval_rundkast_loss": 0.4521505832672119,
395
+ "eval_rundkast_runtime": 33.7711,
396
+ "eval_rundkast_samples_per_second": 39.827,
397
+ "eval_rundkast_steps_per_second": 1.273,
398
+ "eval_rundkast_wer": 0.24872733057588292,
399
+ "step": 4524
400
+ },
401
+ {
402
+ "epoch": 12.0,
403
+ "eval_nb_samtale_loss": 0.5718214511871338,
404
+ "eval_nb_samtale_runtime": 37.7574,
405
+ "eval_nb_samtale_samples_per_second": 14.116,
406
+ "eval_nb_samtale_steps_per_second": 0.45,
407
+ "eval_nb_samtale_wer": 0.2674967447916667,
408
+ "step": 4524
409
+ },
410
+ {
411
+ "epoch": 12.0,
412
+ "eval_bigbrother_loss": 1.1487047672271729,
413
+ "eval_bigbrother_runtime": 41.46,
414
+ "eval_bigbrother_samples_per_second": 33.044,
415
+ "eval_bigbrother_steps_per_second": 1.037,
416
+ "eval_bigbrother_wer": 0.4530733907999678,
417
+ "step": 4524
418
+ },
419
+ {
420
+ "epoch": 13.0,
421
+ "grad_norm": 5.937889099121094,
422
+ "learning_rate": 7.49779086892489e-05,
423
+ "loss": 1.3269,
424
+ "step": 4901
425
+ },
426
+ {
427
+ "epoch": 13.0,
428
+ "eval_rundkast_loss": 0.483146995306015,
429
+ "eval_rundkast_runtime": 33.3218,
430
+ "eval_rundkast_samples_per_second": 40.364,
431
+ "eval_rundkast_steps_per_second": 1.29,
432
+ "eval_rundkast_wer": 0.26002227171492204,
433
+ "step": 4901
434
+ },
435
+ {
436
+ "epoch": 13.0,
437
+ "eval_nb_samtale_loss": 0.6045836806297302,
438
+ "eval_nb_samtale_runtime": 37.3201,
439
+ "eval_nb_samtale_samples_per_second": 14.282,
440
+ "eval_nb_samtale_steps_per_second": 0.456,
441
+ "eval_nb_samtale_wer": 0.2681477864583333,
442
+ "step": 4901
443
+ },
444
+ {
445
+ "epoch": 13.0,
446
+ "eval_bigbrother_loss": 1.1842252016067505,
447
+ "eval_bigbrother_runtime": 41.1883,
448
+ "eval_bigbrother_samples_per_second": 33.262,
449
+ "eval_bigbrother_steps_per_second": 1.044,
450
+ "eval_bigbrother_wer": 0.45057600902279865,
451
+ "step": 4901
452
+ },
453
+ {
454
+ "epoch": 14.0,
455
+ "grad_norm": 6.761897087097168,
456
+ "learning_rate": 7.22017673048601e-05,
457
+ "loss": 1.2988,
458
+ "step": 5278
459
+ },
460
+ {
461
+ "epoch": 14.0,
462
+ "eval_rundkast_loss": 0.5177704691886902,
463
+ "eval_rundkast_runtime": 33.7773,
464
+ "eval_rundkast_samples_per_second": 39.82,
465
+ "eval_rundkast_steps_per_second": 1.273,
466
+ "eval_rundkast_wer": 0.24443207126948774,
467
+ "step": 5278
468
+ },
469
+ {
470
+ "epoch": 14.0,
471
+ "eval_nb_samtale_loss": 0.6508249044418335,
472
+ "eval_nb_samtale_runtime": 37.9055,
473
+ "eval_nb_samtale_samples_per_second": 14.061,
474
+ "eval_nb_samtale_steps_per_second": 0.448,
475
+ "eval_nb_samtale_wer": 0.2630208333333333,
476
+ "step": 5278
477
+ },
478
+ {
479
+ "epoch": 14.0,
480
+ "eval_bigbrother_loss": 1.2824336290359497,
481
+ "eval_bigbrother_runtime": 41.3751,
482
+ "eval_bigbrother_samples_per_second": 33.112,
483
+ "eval_bigbrother_steps_per_second": 1.039,
484
+ "eval_bigbrother_wer": 0.4433255457987594,
485
+ "step": 5278
486
+ },
487
+ {
488
+ "epoch": 15.0,
489
+ "grad_norm": 8.300821304321289,
490
+ "learning_rate": 6.942562592047128e-05,
491
+ "loss": 1.2819,
492
+ "step": 5655
493
+ },
494
+ {
495
+ "epoch": 15.0,
496
+ "eval_rundkast_loss": 0.47504347562789917,
497
+ "eval_rundkast_runtime": 33.2023,
498
+ "eval_rundkast_samples_per_second": 40.509,
499
+ "eval_rundkast_steps_per_second": 1.295,
500
+ "eval_rundkast_wer": 0.24737511931275852,
501
+ "step": 5655
502
+ },
503
+ {
504
+ "epoch": 15.0,
505
+ "eval_nb_samtale_loss": 0.6060231924057007,
506
+ "eval_nb_samtale_runtime": 38.0225,
507
+ "eval_nb_samtale_samples_per_second": 14.018,
508
+ "eval_nb_samtale_steps_per_second": 0.447,
509
+ "eval_nb_samtale_wer": 0.263427734375,
510
+ "step": 5655
511
+ },
512
+ {
513
+ "epoch": 15.0,
514
+ "eval_bigbrother_loss": 1.193629503250122,
515
+ "eval_bigbrother_runtime": 41.725,
516
+ "eval_bigbrother_samples_per_second": 32.834,
517
+ "eval_bigbrother_steps_per_second": 1.031,
518
+ "eval_bigbrother_wer": 0.4493675984854588,
519
+ "step": 5655
520
+ },
521
+ {
522
+ "epoch": 16.0,
523
+ "grad_norm": 6.88249397277832,
524
+ "learning_rate": 6.665684830633285e-05,
525
+ "loss": 1.2504,
526
+ "step": 6032
527
+ },
528
+ {
529
+ "epoch": 16.0,
530
+ "eval_rundkast_loss": 0.5179128646850586,
531
+ "eval_rundkast_runtime": 33.3288,
532
+ "eval_rundkast_samples_per_second": 40.356,
533
+ "eval_rundkast_steps_per_second": 1.29,
534
+ "eval_rundkast_wer": 0.24856824689786827,
535
+ "step": 6032
536
+ },
537
+ {
538
+ "epoch": 16.0,
539
+ "eval_nb_samtale_loss": 0.653479814529419,
540
+ "eval_nb_samtale_runtime": 37.2804,
541
+ "eval_nb_samtale_samples_per_second": 14.297,
542
+ "eval_nb_samtale_steps_per_second": 0.456,
543
+ "eval_nb_samtale_wer": 0.2644856770833333,
544
+ "step": 6032
545
+ },
546
+ {
547
+ "epoch": 16.0,
548
+ "eval_bigbrother_loss": 1.2211058139801025,
549
+ "eval_bigbrother_runtime": 42.0179,
550
+ "eval_bigbrother_samples_per_second": 32.605,
551
+ "eval_bigbrother_steps_per_second": 1.023,
552
+ "eval_bigbrother_wer": 0.43792797873197453,
553
+ "step": 6032
554
+ },
555
+ {
556
+ "epoch": 17.0,
557
+ "grad_norm": 3.6763479709625244,
558
+ "learning_rate": 6.388070692194403e-05,
559
+ "loss": 1.2295,
560
+ "step": 6409
561
+ },
562
+ {
563
+ "epoch": 17.0,
564
+ "eval_rundkast_loss": 0.4937501847743988,
565
+ "eval_rundkast_runtime": 37.7386,
566
+ "eval_rundkast_samples_per_second": 35.64,
567
+ "eval_rundkast_steps_per_second": 1.139,
568
+ "eval_rundkast_wer": 0.2561247216035635,
569
+ "step": 6409
570
+ },
571
+ {
572
+ "epoch": 17.0,
573
+ "eval_nb_samtale_loss": 0.6216253638267517,
574
+ "eval_nb_samtale_runtime": 38.691,
575
+ "eval_nb_samtale_samples_per_second": 13.776,
576
+ "eval_nb_samtale_steps_per_second": 0.439,
577
+ "eval_nb_samtale_wer": 0.2705078125,
578
+ "step": 6409
579
+ },
580
+ {
581
+ "epoch": 17.0,
582
+ "eval_bigbrother_loss": 1.188571572303772,
583
+ "eval_bigbrother_runtime": 41.7402,
584
+ "eval_bigbrother_samples_per_second": 32.822,
585
+ "eval_bigbrother_steps_per_second": 1.03,
586
+ "eval_bigbrother_wer": 0.4334165793925723,
587
+ "step": 6409
588
+ },
589
+ {
590
+ "epoch": 18.0,
591
+ "grad_norm": 5.536041259765625,
592
+ "learning_rate": 6.110456553755524e-05,
593
+ "loss": 1.2053,
594
+ "step": 6786
595
+ },
596
+ {
597
+ "epoch": 18.0,
598
+ "eval_rundkast_loss": 0.4938836097717285,
599
+ "eval_rundkast_runtime": 33.2744,
600
+ "eval_rundkast_samples_per_second": 40.421,
601
+ "eval_rundkast_steps_per_second": 1.292,
602
+ "eval_rundkast_wer": 0.24856824689786827,
603
+ "step": 6786
604
+ },
605
+ {
606
+ "epoch": 18.0,
607
+ "eval_nb_samtale_loss": 0.6238839030265808,
608
+ "eval_nb_samtale_runtime": 37.8912,
609
+ "eval_nb_samtale_samples_per_second": 14.067,
610
+ "eval_nb_samtale_steps_per_second": 0.449,
611
+ "eval_nb_samtale_wer": 0.263916015625,
612
+ "step": 6786
613
+ },
614
+ {
615
+ "epoch": 18.0,
616
+ "eval_bigbrother_loss": 1.1581498384475708,
617
+ "eval_bigbrother_runtime": 41.4842,
618
+ "eval_bigbrother_samples_per_second": 33.025,
619
+ "eval_bigbrother_steps_per_second": 1.037,
620
+ "eval_bigbrother_wer": 0.4314831225328285,
621
+ "step": 6786
622
+ },
623
+ {
624
+ "epoch": 19.0,
625
+ "grad_norm": 5.267818927764893,
626
+ "learning_rate": 5.832842415316643e-05,
627
+ "loss": 1.1815,
628
+ "step": 7163
629
+ },
630
+ {
631
+ "epoch": 19.0,
632
+ "eval_rundkast_loss": 0.4980849623680115,
633
+ "eval_rundkast_runtime": 33.6894,
634
+ "eval_rundkast_samples_per_second": 39.923,
635
+ "eval_rundkast_steps_per_second": 1.276,
636
+ "eval_rundkast_wer": 0.24451161310849506,
637
+ "step": 7163
638
+ },
639
+ {
640
+ "epoch": 19.0,
641
+ "eval_nb_samtale_loss": 0.6296201348304749,
642
+ "eval_nb_samtale_runtime": 37.5618,
643
+ "eval_nb_samtale_samples_per_second": 14.19,
644
+ "eval_nb_samtale_steps_per_second": 0.453,
645
+ "eval_nb_samtale_wer": 0.2608235677083333,
646
+ "step": 7163
647
+ },
648
+ {
649
+ "epoch": 19.0,
650
+ "eval_bigbrother_loss": 1.1892309188842773,
651
+ "eval_bigbrother_runtime": 41.4844,
652
+ "eval_bigbrother_samples_per_second": 33.024,
653
+ "eval_bigbrother_steps_per_second": 1.037,
654
+ "eval_bigbrother_wer": 0.4253605091436397,
655
+ "step": 7163
656
+ },
657
+ {
658
+ "epoch": 20.0,
659
+ "grad_norm": 41.6290283203125,
660
+ "learning_rate": 5.5559646539027985e-05,
661
+ "loss": 1.1703,
662
+ "step": 7540
663
+ },
664
+ {
665
+ "epoch": 20.0,
666
+ "eval_rundkast_loss": 0.5075950026512146,
667
+ "eval_rundkast_runtime": 33.2501,
668
+ "eval_rundkast_samples_per_second": 40.451,
669
+ "eval_rundkast_steps_per_second": 1.293,
670
+ "eval_rundkast_wer": 0.2478523703468024,
671
+ "step": 7540
672
+ },
673
+ {
674
+ "epoch": 20.0,
675
+ "eval_nb_samtale_loss": 0.6342408061027527,
676
+ "eval_nb_samtale_runtime": 37.4134,
677
+ "eval_nb_samtale_samples_per_second": 14.246,
678
+ "eval_nb_samtale_steps_per_second": 0.454,
679
+ "eval_nb_samtale_wer": 0.2586263020833333,
680
+ "step": 7540
681
+ },
682
+ {
683
+ "epoch": 20.0,
684
+ "eval_bigbrother_loss": 1.206842303276062,
685
+ "eval_bigbrother_runtime": 41.5395,
686
+ "eval_bigbrother_samples_per_second": 32.981,
687
+ "eval_bigbrother_steps_per_second": 1.035,
688
+ "eval_bigbrother_wer": 0.42519938773866106,
689
+ "step": 7540
690
+ },
691
+ {
692
+ "epoch": 21.0,
693
+ "grad_norm": 6.965384483337402,
694
+ "learning_rate": 5.278350515463918e-05,
695
+ "loss": 1.1446,
696
+ "step": 7917
697
+ },
698
+ {
699
+ "epoch": 21.0,
700
+ "eval_rundkast_loss": 0.5136203169822693,
701
+ "eval_rundkast_runtime": 33.5002,
702
+ "eval_rundkast_samples_per_second": 40.149,
703
+ "eval_rundkast_steps_per_second": 1.284,
704
+ "eval_rundkast_wer": 0.24793191218580973,
705
+ "step": 7917
706
+ },
707
+ {
708
+ "epoch": 21.0,
709
+ "eval_nb_samtale_loss": 0.6333425641059875,
710
+ "eval_nb_samtale_runtime": 37.5282,
711
+ "eval_nb_samtale_samples_per_second": 14.203,
712
+ "eval_nb_samtale_steps_per_second": 0.453,
713
+ "eval_nb_samtale_wer": 0.265625,
714
+ "step": 7917
715
+ },
716
+ {
717
+ "epoch": 21.0,
718
+ "eval_bigbrother_loss": 1.186505913734436,
719
+ "eval_bigbrother_runtime": 41.5636,
720
+ "eval_bigbrother_samples_per_second": 32.962,
721
+ "eval_bigbrother_steps_per_second": 1.035,
722
+ "eval_bigbrother_wer": 0.42487714492870376,
723
+ "step": 7917
724
+ },
725
+ {
726
+ "epoch": 22.0,
727
+ "grad_norm": 22.68753433227539,
728
+ "learning_rate": 5.000736377025037e-05,
729
+ "loss": 1.1384,
730
+ "step": 8294
731
+ },
732
+ {
733
+ "epoch": 22.0,
734
+ "eval_rundkast_loss": 0.5014224052429199,
735
+ "eval_rundkast_runtime": 33.6382,
736
+ "eval_rundkast_samples_per_second": 39.984,
737
+ "eval_rundkast_steps_per_second": 1.278,
738
+ "eval_rundkast_wer": 0.24880687241489025,
739
+ "step": 8294
740
+ },
741
+ {
742
+ "epoch": 22.0,
743
+ "eval_nb_samtale_loss": 0.6214331984519958,
744
+ "eval_nb_samtale_runtime": 37.41,
745
+ "eval_nb_samtale_samples_per_second": 14.248,
746
+ "eval_nb_samtale_steps_per_second": 0.454,
747
+ "eval_nb_samtale_wer": 0.2609049479166667,
748
+ "step": 8294
749
+ },
750
+ {
751
+ "epoch": 22.0,
752
+ "eval_bigbrother_loss": 1.1910523176193237,
753
+ "eval_bigbrother_runtime": 41.6527,
754
+ "eval_bigbrother_samples_per_second": 32.891,
755
+ "eval_bigbrother_steps_per_second": 1.032,
756
+ "eval_bigbrother_wer": 0.42189639893659875,
757
+ "step": 8294
758
+ },
759
+ {
760
+ "epoch": 23.0,
761
+ "grad_norm": 2.731438398361206,
762
+ "learning_rate": 4.723122238586156e-05,
763
+ "loss": 1.1324,
764
+ "step": 8671
765
+ },
766
+ {
767
+ "epoch": 23.0,
768
+ "eval_rundkast_loss": 0.476345032453537,
769
+ "eval_rundkast_runtime": 33.4012,
770
+ "eval_rundkast_samples_per_second": 40.268,
771
+ "eval_rundkast_steps_per_second": 1.287,
772
+ "eval_rundkast_wer": 0.24968183264397073,
773
+ "step": 8671
774
+ },
775
+ {
776
+ "epoch": 23.0,
777
+ "eval_nb_samtale_loss": 0.6042336821556091,
778
+ "eval_nb_samtale_runtime": 37.9587,
779
+ "eval_nb_samtale_samples_per_second": 14.042,
780
+ "eval_nb_samtale_steps_per_second": 0.448,
781
+ "eval_nb_samtale_wer": 0.2681477864583333,
782
+ "step": 8671
783
+ },
784
+ {
785
+ "epoch": 23.0,
786
+ "eval_bigbrother_loss": 1.1591678857803345,
787
+ "eval_bigbrother_runtime": 41.5785,
788
+ "eval_bigbrother_samples_per_second": 32.95,
789
+ "eval_bigbrother_steps_per_second": 1.034,
790
+ "eval_bigbrother_wer": 0.4180294852171111,
791
+ "step": 8671
792
+ },
793
+ {
794
+ "epoch": 24.0,
795
+ "grad_norm": 4.182296276092529,
796
+ "learning_rate": 4.4455081001472755e-05,
797
+ "loss": 1.0927,
798
+ "step": 9048
799
+ },
800
+ {
801
+ "epoch": 24.0,
802
+ "eval_rundkast_loss": 0.49040549993515015,
803
+ "eval_rundkast_runtime": 33.4922,
804
+ "eval_rundkast_samples_per_second": 40.159,
805
+ "eval_rundkast_steps_per_second": 1.284,
806
+ "eval_rundkast_wer": 0.2406936048361438,
807
+ "step": 9048
808
+ },
809
+ {
810
+ "epoch": 24.0,
811
+ "eval_nb_samtale_loss": 0.6173272132873535,
812
+ "eval_nb_samtale_runtime": 37.5762,
813
+ "eval_nb_samtale_samples_per_second": 14.184,
814
+ "eval_nb_samtale_steps_per_second": 0.452,
815
+ "eval_nb_samtale_wer": 0.26025390625,
816
+ "step": 9048
817
+ },
818
+ {
819
+ "epoch": 24.0,
820
+ "eval_bigbrother_loss": 1.1952488422393799,
821
+ "eval_bigbrother_runtime": 41.4629,
822
+ "eval_bigbrother_samples_per_second": 33.042,
823
+ "eval_bigbrother_steps_per_second": 1.037,
824
+ "eval_bigbrother_wer": 0.41786836381213244,
825
+ "step": 9048
826
+ },
827
+ {
828
+ "epoch": 25.0,
829
+ "grad_norm": 7.412995338439941,
830
+ "learning_rate": 4.167893961708395e-05,
831
+ "loss": 1.0897,
832
+ "step": 9425
833
+ },
834
+ {
835
+ "epoch": 25.0,
836
+ "eval_rundkast_loss": 0.5106588006019592,
837
+ "eval_rundkast_runtime": 33.3541,
838
+ "eval_rundkast_samples_per_second": 40.325,
839
+ "eval_rundkast_steps_per_second": 1.289,
840
+ "eval_rundkast_wer": 0.25206808781419027,
841
+ "step": 9425
842
+ },
843
+ {
844
+ "epoch": 25.0,
845
+ "eval_nb_samtale_loss": 0.6364408731460571,
846
+ "eval_nb_samtale_runtime": 37.3682,
847
+ "eval_nb_samtale_samples_per_second": 14.263,
848
+ "eval_nb_samtale_steps_per_second": 0.455,
849
+ "eval_nb_samtale_wer": 0.2630208333333333,
850
+ "step": 9425
851
+ },
852
+ {
853
+ "epoch": 25.0,
854
+ "eval_bigbrother_loss": 1.2089372873306274,
855
+ "eval_bigbrother_runtime": 41.0339,
856
+ "eval_bigbrother_samples_per_second": 33.387,
857
+ "eval_bigbrother_steps_per_second": 1.048,
858
+ "eval_bigbrother_wer": 0.4167405139772819,
859
+ "step": 9425
860
+ },
861
+ {
862
+ "epoch": 26.0,
863
+ "grad_norm": 20.474260330200195,
864
+ "learning_rate": 3.890279823269514e-05,
865
+ "loss": 1.0818,
866
+ "step": 9802
867
+ },
868
+ {
869
+ "epoch": 26.0,
870
+ "eval_rundkast_loss": 0.49753764271736145,
871
+ "eval_rundkast_runtime": 33.4596,
872
+ "eval_rundkast_samples_per_second": 40.198,
873
+ "eval_rundkast_steps_per_second": 1.285,
874
+ "eval_rundkast_wer": 0.24562519885459752,
875
+ "step": 9802
876
+ },
877
+ {
878
+ "epoch": 26.0,
879
+ "eval_nb_samtale_loss": 0.6342372298240662,
880
+ "eval_nb_samtale_runtime": 37.5602,
881
+ "eval_nb_samtale_samples_per_second": 14.191,
882
+ "eval_nb_samtale_steps_per_second": 0.453,
883
+ "eval_nb_samtale_wer": 0.260009765625,
884
+ "step": 9802
885
+ },
886
+ {
887
+ "epoch": 26.0,
888
+ "eval_bigbrother_loss": 1.175524115562439,
889
+ "eval_bigbrother_runtime": 41.4743,
890
+ "eval_bigbrother_samples_per_second": 33.033,
891
+ "eval_bigbrother_steps_per_second": 1.037,
892
+ "eval_bigbrother_wer": 0.4159349069523886,
893
+ "step": 9802
894
+ },
895
+ {
896
+ "epoch": 27.0,
897
+ "grad_norm": 3.3909595012664795,
898
+ "learning_rate": 3.6126656848306336e-05,
899
+ "loss": 1.0629,
900
+ "step": 10179
901
+ },
902
+ {
903
+ "epoch": 27.0,
904
+ "eval_rundkast_loss": 0.5271292924880981,
905
+ "eval_rundkast_runtime": 33.4554,
906
+ "eval_rundkast_samples_per_second": 40.203,
907
+ "eval_rundkast_steps_per_second": 1.285,
908
+ "eval_rundkast_wer": 0.24347756920139993,
909
+ "step": 10179
910
+ },
911
+ {
912
+ "epoch": 27.0,
913
+ "eval_nb_samtale_loss": 0.6458906531333923,
914
+ "eval_nb_samtale_runtime": 37.3547,
915
+ "eval_nb_samtale_samples_per_second": 14.269,
916
+ "eval_nb_samtale_steps_per_second": 0.455,
917
+ "eval_nb_samtale_wer": 0.2598470052083333,
918
+ "step": 10179
919
+ },
920
+ {
921
+ "epoch": 27.0,
922
+ "eval_bigbrother_loss": 1.1968939304351807,
923
+ "eval_bigbrother_runtime": 41.3751,
924
+ "eval_bigbrother_samples_per_second": 33.112,
925
+ "eval_bigbrother_steps_per_second": 1.039,
926
+ "eval_bigbrother_wer": 0.4142431322001128,
927
+ "step": 10179
928
+ },
929
+ {
930
+ "epoch": 28.0,
931
+ "grad_norm": 19.91695213317871,
932
+ "learning_rate": 3.335787923416789e-05,
933
+ "loss": 1.069,
934
+ "step": 10556
935
+ },
936
+ {
937
+ "epoch": 28.0,
938
+ "eval_rundkast_loss": 0.5136268138885498,
939
+ "eval_rundkast_runtime": 33.4249,
940
+ "eval_rundkast_samples_per_second": 40.24,
941
+ "eval_rundkast_steps_per_second": 1.286,
942
+ "eval_rundkast_wer": 0.2433980273623926,
943
+ "step": 10556
944
+ },
945
+ {
946
+ "epoch": 28.0,
947
+ "eval_nb_samtale_loss": 0.6378623247146606,
948
+ "eval_nb_samtale_runtime": 37.4155,
949
+ "eval_nb_samtale_samples_per_second": 14.245,
950
+ "eval_nb_samtale_steps_per_second": 0.454,
951
+ "eval_nb_samtale_wer": 0.25927734375,
952
+ "step": 10556
953
+ },
954
+ {
955
+ "epoch": 28.0,
956
+ "eval_bigbrother_loss": 1.1700557470321655,
957
+ "eval_bigbrother_runtime": 41.488,
958
+ "eval_bigbrother_samples_per_second": 33.022,
959
+ "eval_bigbrother_steps_per_second": 1.036,
960
+ "eval_bigbrother_wer": 0.41102070410053976,
961
+ "step": 10556
962
+ },
963
+ {
964
+ "epoch": 29.0,
965
+ "grad_norm": 4.6988959312438965,
966
+ "learning_rate": 3.058173784977909e-05,
967
+ "loss": 1.0561,
968
+ "step": 10933
969
+ },
970
+ {
971
+ "epoch": 29.0,
972
+ "eval_rundkast_loss": 0.5006275773048401,
973
+ "eval_rundkast_runtime": 33.4079,
974
+ "eval_rundkast_samples_per_second": 40.26,
975
+ "eval_rundkast_steps_per_second": 1.287,
976
+ "eval_rundkast_wer": 0.24387527839643652,
977
+ "step": 10933
978
+ },
979
+ {
980
+ "epoch": 29.0,
981
+ "eval_nb_samtale_loss": 0.6258318424224854,
982
+ "eval_nb_samtale_runtime": 37.2465,
983
+ "eval_nb_samtale_samples_per_second": 14.31,
984
+ "eval_nb_samtale_steps_per_second": 0.456,
985
+ "eval_nb_samtale_wer": 0.258056640625,
986
+ "step": 10933
987
+ },
988
+ {
989
+ "epoch": 29.0,
990
+ "eval_bigbrother_loss": 1.1609516143798828,
991
+ "eval_bigbrother_runtime": 41.9041,
992
+ "eval_bigbrother_samples_per_second": 32.694,
993
+ "eval_bigbrother_steps_per_second": 1.026,
994
+ "eval_bigbrother_wer": 0.4105373398856038,
995
+ "step": 10933
996
+ },
997
+ {
998
+ "epoch": 30.0,
999
+ "grad_norm": 6.915459156036377,
1000
+ "learning_rate": 2.7805596465390282e-05,
1001
+ "loss": 1.046,
1002
+ "step": 11310
1003
+ },
1004
+ {
1005
+ "epoch": 30.0,
1006
+ "eval_rundkast_loss": 0.5193740725517273,
1007
+ "eval_rundkast_runtime": 33.0243,
1008
+ "eval_rundkast_samples_per_second": 40.728,
1009
+ "eval_rundkast_steps_per_second": 1.302,
1010
+ "eval_rundkast_wer": 0.24530703149856825,
1011
+ "step": 11310
1012
+ },
1013
+ {
1014
+ "epoch": 30.0,
1015
+ "eval_nb_samtale_loss": 0.6532759666442871,
1016
+ "eval_nb_samtale_runtime": 37.2211,
1017
+ "eval_nb_samtale_samples_per_second": 14.32,
1018
+ "eval_nb_samtale_steps_per_second": 0.457,
1019
+ "eval_nb_samtale_wer": 0.260986328125,
1020
+ "step": 11310
1021
+ },
1022
+ {
1023
+ "epoch": 30.0,
1024
+ "eval_bigbrother_loss": 1.2057961225509644,
1025
+ "eval_bigbrother_runtime": 41.7437,
1026
+ "eval_bigbrother_samples_per_second": 32.819,
1027
+ "eval_bigbrother_steps_per_second": 1.03,
1028
+ "eval_bigbrother_wer": 0.4085233223233707,
1029
+ "step": 11310
1030
+ },
1031
+ {
1032
+ "epoch": 31.0,
1033
+ "grad_norm": 6.216676712036133,
1034
+ "learning_rate": 2.5029455081001475e-05,
1035
+ "loss": 1.0244,
1036
+ "step": 11687
1037
+ },
1038
+ {
1039
+ "epoch": 31.0,
1040
+ "eval_rundkast_loss": 0.5112914443016052,
1041
+ "eval_rundkast_runtime": 33.3775,
1042
+ "eval_rundkast_samples_per_second": 40.297,
1043
+ "eval_rundkast_steps_per_second": 1.288,
1044
+ "eval_rundkast_wer": 0.24347756920139993,
1045
+ "step": 11687
1046
+ },
1047
+ {
1048
+ "epoch": 31.0,
1049
+ "eval_nb_samtale_loss": 0.637014627456665,
1050
+ "eval_nb_samtale_runtime": 37.0028,
1051
+ "eval_nb_samtale_samples_per_second": 14.404,
1052
+ "eval_nb_samtale_steps_per_second": 0.459,
1053
+ "eval_nb_samtale_wer": 0.2584635416666667,
1054
+ "step": 11687
1055
+ },
1056
+ {
1057
+ "epoch": 31.0,
1058
+ "eval_bigbrother_loss": 1.196700096130371,
1059
+ "eval_bigbrother_runtime": 41.5693,
1060
+ "eval_bigbrother_samples_per_second": 32.957,
1061
+ "eval_bigbrother_steps_per_second": 1.034,
1062
+ "eval_bigbrother_wer": 0.40699266897607345,
1063
+ "step": 11687
1064
+ },
1065
+ {
1066
+ "epoch": 32.0,
1067
+ "grad_norm": 5.768988609313965,
1068
+ "learning_rate": 2.2253313696612664e-05,
1069
+ "loss": 1.0211,
1070
+ "step": 12064
1071
+ },
1072
+ {
1073
+ "epoch": 32.0,
1074
+ "eval_rundkast_loss": 0.5140534043312073,
1075
+ "eval_rundkast_runtime": 33.4214,
1076
+ "eval_rundkast_samples_per_second": 40.244,
1077
+ "eval_rundkast_steps_per_second": 1.287,
1078
+ "eval_rundkast_wer": 0.24307986000636334,
1079
+ "step": 12064
1080
+ },
1081
+ {
1082
+ "epoch": 32.0,
1083
+ "eval_nb_samtale_loss": 0.6492373943328857,
1084
+ "eval_nb_samtale_runtime": 37.5056,
1085
+ "eval_nb_samtale_samples_per_second": 14.211,
1086
+ "eval_nb_samtale_steps_per_second": 0.453,
1087
+ "eval_nb_samtale_wer": 0.2571614583333333,
1088
+ "step": 12064
1089
+ },
1090
+ {
1091
+ "epoch": 32.0,
1092
+ "eval_bigbrother_loss": 1.1793997287750244,
1093
+ "eval_bigbrother_runtime": 41.1907,
1094
+ "eval_bigbrother_samples_per_second": 33.26,
1095
+ "eval_bigbrother_steps_per_second": 1.044,
1096
+ "eval_bigbrother_wer": 0.40812051881092404,
1097
+ "step": 12064
1098
+ },
1099
+ {
1100
+ "epoch": 33.0,
1101
+ "grad_norm": 8.210163116455078,
1102
+ "learning_rate": 1.947717231222386e-05,
1103
+ "loss": 1.0221,
1104
+ "step": 12441
1105
+ },
1106
+ {
1107
+ "epoch": 33.0,
1108
+ "eval_rundkast_loss": 0.5195760726928711,
1109
+ "eval_rundkast_runtime": 33.4259,
1110
+ "eval_rundkast_samples_per_second": 40.238,
1111
+ "eval_rundkast_steps_per_second": 1.286,
1112
+ "eval_rundkast_wer": 0.24188673242125358,
1113
+ "step": 12441
1114
+ },
1115
+ {
1116
+ "epoch": 33.0,
1117
+ "eval_nb_samtale_loss": 0.6468714475631714,
1118
+ "eval_nb_samtale_runtime": 37.1982,
1119
+ "eval_nb_samtale_samples_per_second": 14.329,
1120
+ "eval_nb_samtale_steps_per_second": 0.457,
1121
+ "eval_nb_samtale_wer": 0.257568359375,
1122
+ "step": 12441
1123
+ },
1124
+ {
1125
+ "epoch": 33.0,
1126
+ "eval_bigbrother_loss": 1.1880689859390259,
1127
+ "eval_bigbrother_runtime": 41.3876,
1128
+ "eval_bigbrother_samples_per_second": 33.102,
1129
+ "eval_bigbrother_steps_per_second": 1.039,
1130
+ "eval_bigbrother_wer": 0.40747603319100945,
1131
+ "step": 12441
1132
+ },
1133
+ {
1134
+ "epoch": 34.0,
1135
+ "grad_norm": 4.438199043273926,
1136
+ "learning_rate": 1.670839469808542e-05,
1137
+ "loss": 1.0095,
1138
+ "step": 12818
1139
+ },
1140
+ {
1141
+ "epoch": 34.0,
1142
+ "eval_rundkast_loss": 0.5272213816642761,
1143
+ "eval_rundkast_runtime": 33.3742,
1144
+ "eval_rundkast_samples_per_second": 40.301,
1145
+ "eval_rundkast_steps_per_second": 1.288,
1146
+ "eval_rundkast_wer": 0.24212535793827553,
1147
+ "step": 12818
1148
+ },
1149
+ {
1150
+ "epoch": 34.0,
1151
+ "eval_nb_samtale_loss": 0.6708551645278931,
1152
+ "eval_nb_samtale_runtime": 37.2593,
1153
+ "eval_nb_samtale_samples_per_second": 14.305,
1154
+ "eval_nb_samtale_steps_per_second": 0.456,
1155
+ "eval_nb_samtale_wer": 0.2578125,
1156
+ "step": 12818
1157
+ },
1158
+ {
1159
+ "epoch": 34.0,
1160
+ "eval_bigbrother_loss": 1.2168066501617432,
1161
+ "eval_bigbrother_runtime": 41.3249,
1162
+ "eval_bigbrother_samples_per_second": 33.152,
1163
+ "eval_bigbrother_steps_per_second": 1.041,
1164
+ "eval_bigbrother_wer": 0.4064287440586482,
1165
+ "step": 12818
1166
+ },
1167
+ {
1168
+ "epoch": 35.0,
1169
+ "grad_norm": 18.065574645996094,
1170
+ "learning_rate": 1.3932253313696614e-05,
1171
+ "loss": 1.0011,
1172
+ "step": 13195
1173
+ },
1174
+ {
1175
+ "epoch": 35.0,
1176
+ "eval_rundkast_loss": 0.5081976652145386,
1177
+ "eval_rundkast_runtime": 33.3601,
1178
+ "eval_rundkast_samples_per_second": 40.318,
1179
+ "eval_rundkast_steps_per_second": 1.289,
1180
+ "eval_rundkast_wer": 0.2410117721921731,
1181
+ "step": 13195
1182
+ },
1183
+ {
1184
+ "epoch": 35.0,
1185
+ "eval_nb_samtale_loss": 0.6458988189697266,
1186
+ "eval_nb_samtale_runtime": 37.6087,
1187
+ "eval_nb_samtale_samples_per_second": 14.172,
1188
+ "eval_nb_samtale_steps_per_second": 0.452,
1189
+ "eval_nb_samtale_wer": 0.257080078125,
1190
+ "step": 13195
1191
+ },
1192
+ {
1193
+ "epoch": 35.0,
1194
+ "eval_bigbrother_loss": 1.1833205223083496,
1195
+ "eval_bigbrother_runtime": 41.2758,
1196
+ "eval_bigbrother_samples_per_second": 33.191,
1197
+ "eval_bigbrother_steps_per_second": 1.042,
1198
+ "eval_bigbrother_wer": 0.40667042616611615,
1199
+ "step": 13195
1200
+ },
1201
+ {
1202
+ "epoch": 36.0,
1203
+ "grad_norm": 14.747867584228516,
1204
+ "learning_rate": 1.1156111929307807e-05,
1205
+ "loss": 0.9849,
1206
+ "step": 13572
1207
+ },
1208
+ {
1209
+ "epoch": 36.0,
1210
+ "eval_rundkast_loss": 0.5170104503631592,
1211
+ "eval_rundkast_runtime": 33.5036,
1212
+ "eval_rundkast_samples_per_second": 40.145,
1213
+ "eval_rundkast_steps_per_second": 1.283,
1214
+ "eval_rundkast_wer": 0.24140948138720966,
1215
+ "step": 13572
1216
+ },
1217
+ {
1218
+ "epoch": 36.0,
1219
+ "eval_nb_samtale_loss": 0.6568956971168518,
1220
+ "eval_nb_samtale_runtime": 37.0353,
1221
+ "eval_nb_samtale_samples_per_second": 14.392,
1222
+ "eval_nb_samtale_steps_per_second": 0.459,
1223
+ "eval_nb_samtale_wer": 0.2569173177083333,
1224
+ "step": 13572
1225
+ },
1226
+ {
1227
+ "epoch": 36.0,
1228
+ "eval_bigbrother_loss": 1.2056487798690796,
1229
+ "eval_bigbrother_runtime": 41.6456,
1230
+ "eval_bigbrother_samples_per_second": 32.897,
1231
+ "eval_bigbrother_steps_per_second": 1.033,
1232
+ "eval_bigbrother_wer": 0.404656408603883,
1233
+ "step": 13572
1234
+ },
1235
+ {
1236
+ "epoch": 37.0,
1237
+ "grad_norm": 4.995520114898682,
1238
+ "learning_rate": 8.379970544918998e-06,
1239
+ "loss": 0.9785,
1240
+ "step": 13949
1241
+ },
1242
+ {
1243
+ "epoch": 37.0,
1244
+ "eval_rundkast_loss": 0.5311599969863892,
1245
+ "eval_rundkast_runtime": 33.4582,
1246
+ "eval_rundkast_samples_per_second": 40.199,
1247
+ "eval_rundkast_steps_per_second": 1.285,
1248
+ "eval_rundkast_wer": 0.2419662742602609,
1249
+ "step": 13949
1250
+ },
1251
+ {
1252
+ "epoch": 37.0,
1253
+ "eval_nb_samtale_loss": 0.6731538772583008,
1254
+ "eval_nb_samtale_runtime": 37.2532,
1255
+ "eval_nb_samtale_samples_per_second": 14.307,
1256
+ "eval_nb_samtale_steps_per_second": 0.456,
1257
+ "eval_nb_samtale_wer": 0.2586263020833333,
1258
+ "step": 13949
1259
+ },
1260
+ {
1261
+ "epoch": 37.0,
1262
+ "eval_bigbrother_loss": 1.2453778982162476,
1263
+ "eval_bigbrother_runtime": 41.2503,
1264
+ "eval_bigbrother_samples_per_second": 33.212,
1265
+ "eval_bigbrother_steps_per_second": 1.042,
1266
+ "eval_bigbrother_wer": 0.40667042616611615,
1267
+ "step": 13949
1268
+ },
1269
+ {
1270
+ "epoch": 38.0,
1271
+ "grad_norm": 4.370805740356445,
1272
+ "learning_rate": 5.603829160530192e-06,
1273
+ "loss": 0.9853,
1274
+ "step": 14326
1275
+ },
1276
+ {
1277
+ "epoch": 38.0,
1278
+ "eval_rundkast_loss": 0.514400064945221,
1279
+ "eval_rundkast_runtime": 33.9777,
1280
+ "eval_rundkast_samples_per_second": 39.585,
1281
+ "eval_rundkast_steps_per_second": 1.266,
1282
+ "eval_rundkast_wer": 0.2415685650652243,
1283
+ "step": 14326
1284
+ },
1285
+ {
1286
+ "epoch": 38.0,
1287
+ "eval_nb_samtale_loss": 0.6526333689689636,
1288
+ "eval_nb_samtale_runtime": 37.0789,
1289
+ "eval_nb_samtale_samples_per_second": 14.375,
1290
+ "eval_nb_samtale_steps_per_second": 0.458,
1291
+ "eval_nb_samtale_wer": 0.2571614583333333,
1292
+ "step": 14326
1293
+ },
1294
+ {
1295
+ "epoch": 38.0,
1296
+ "eval_bigbrother_loss": 1.2180824279785156,
1297
+ "eval_bigbrother_runtime": 41.4647,
1298
+ "eval_bigbrother_samples_per_second": 33.04,
1299
+ "eval_bigbrother_steps_per_second": 1.037,
1300
+ "eval_bigbrother_wer": 0.4073954724885201,
1301
+ "step": 14326
1302
+ },
1303
+ {
1304
+ "epoch": 39.0,
1305
+ "grad_norm": 7.822154998779297,
1306
+ "learning_rate": 2.8350515463917527e-06,
1307
+ "loss": 0.9714,
1308
+ "step": 14703
1309
+ },
1310
+ {
1311
+ "epoch": 39.0,
1312
+ "eval_rundkast_loss": 0.5187526345252991,
1313
+ "eval_rundkast_runtime": 33.0413,
1314
+ "eval_rundkast_samples_per_second": 40.707,
1315
+ "eval_rundkast_steps_per_second": 1.301,
1316
+ "eval_rundkast_wer": 0.24268215081132677,
1317
+ "step": 14703
1318
+ },
1319
+ {
1320
+ "epoch": 39.0,
1321
+ "eval_nb_samtale_loss": 0.6581916809082031,
1322
+ "eval_nb_samtale_runtime": 37.1203,
1323
+ "eval_nb_samtale_samples_per_second": 14.359,
1324
+ "eval_nb_samtale_steps_per_second": 0.458,
1325
+ "eval_nb_samtale_wer": 0.257568359375,
1326
+ "step": 14703
1327
+ },
1328
+ {
1329
+ "epoch": 39.0,
1330
+ "eval_bigbrother_loss": 1.2226907014846802,
1331
+ "eval_bigbrother_runtime": 41.2361,
1332
+ "eval_bigbrother_samples_per_second": 33.223,
1333
+ "eval_bigbrother_steps_per_second": 1.043,
1334
+ "eval_bigbrother_wer": 0.404898090711351,
1335
+ "step": 14703
1336
+ },
1337
+ {
1338
+ "epoch": 40.0,
1339
+ "grad_norm": 5.108997821807861,
1340
+ "learning_rate": 5.8910162002945506e-08,
1341
+ "loss": 0.9743,
1342
+ "step": 15080
1343
+ },
1344
+ {
1345
+ "epoch": 40.0,
1346
+ "eval_rundkast_loss": 0.5217476487159729,
1347
+ "eval_rundkast_runtime": 33.378,
1348
+ "eval_rundkast_samples_per_second": 40.296,
1349
+ "eval_rundkast_steps_per_second": 1.288,
1350
+ "eval_rundkast_wer": 0.2423639834552975,
1351
+ "step": 15080
1352
+ },
1353
+ {
1354
+ "epoch": 40.0,
1355
+ "eval_nb_samtale_loss": 0.6616787314414978,
1356
+ "eval_nb_samtale_runtime": 37.4121,
1357
+ "eval_nb_samtale_samples_per_second": 14.247,
1358
+ "eval_nb_samtale_steps_per_second": 0.454,
1359
+ "eval_nb_samtale_wer": 0.2579752604166667,
1360
+ "step": 15080
1361
+ },
1362
+ {
1363
+ "epoch": 40.0,
1364
+ "eval_bigbrother_loss": 1.2213834524154663,
1365
+ "eval_bigbrother_runtime": 41.9733,
1366
+ "eval_bigbrother_samples_per_second": 32.64,
1367
+ "eval_bigbrother_steps_per_second": 1.024,
1368
+ "eval_bigbrother_wer": 0.40538145492628697,
1369
+ "step": 15080
1370
+ }
1371
+ ],
1372
+ "logging_steps": 500,
1373
+ "max_steps": 15080,
1374
+ "num_input_tokens_seen": 0,
1375
+ "num_train_epochs": 40,
1376
+ "save_steps": 500,
1377
+ "total_flos": 8.278376708995564e+19,
1378
+ "train_batch_size": 48,
1379
+ "trial_name": null,
1380
+ "trial_params": null
1381
+ }
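The log_history above records training loss plus per-dataset evaluation (loss, runtime, WER) for rundkast, nb_samtale and bigbrother once per epoch over all 40 epochs. A minimal sketch for summarising those curves, assuming the trainer state is read locally as trainer_state.json:

```python
import json

# Collect the per-epoch WER series logged above and report the best epoch
# for each evaluation set. The file name is an assumption.
with open("trainer_state.json") as f:
    state = json.load(f)

wer_by_set = {}
for entry in state["log_history"]:
    for key, value in entry.items():
        if key.startswith("eval_") and key.endswith("_wer"):
            name = key[len("eval_"):-len("_wer")]   # "eval_rundkast_wer" -> "rundkast"
            wer_by_set.setdefault(name, []).append((entry["epoch"], value))

for name, points in sorted(wer_by_set.items()):
    best_epoch, best_wer = min(points, key=lambda p: p[1])
    print(f"{name}: best WER {best_wer:.4f} at epoch {best_epoch:.0f}")
```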
checkpoint-15080/training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4a7afd10c5b5a15d9300aae5934cd968ceb099ca1ed1f39f2f47a88cce68902c
3
+ size 4984
config.json ADDED
@@ -0,0 +1,116 @@
1
+ {
2
+ "_name_or_path": "NbAiLab/nb-wav2vec2-300m-bokmaal",
3
+ "activation_dropout": 0.055,
4
+ "adapter_attn_dim": null,
5
+ "adapter_kernel_size": 3,
6
+ "adapter_stride": 2,
7
+ "add_adapter": false,
8
+ "apply_spec_augment": true,
9
+ "architectures": [
10
+ "Wav2Vec2ForCTC"
11
+ ],
12
+ "attention_dropout": 0.094,
13
+ "bos_token_id": 1,
14
+ "classifier_proj_size": 256,
15
+ "codevector_dim": 768,
16
+ "contrastive_logits_temperature": 0.1,
17
+ "conv_bias": true,
18
+ "conv_dim": [
19
+ 512,
20
+ 512,
21
+ 512,
22
+ 512,
23
+ 512,
24
+ 512,
25
+ 512
26
+ ],
27
+ "conv_kernel": [
28
+ 10,
29
+ 3,
30
+ 3,
31
+ 3,
32
+ 3,
33
+ 2,
34
+ 2
35
+ ],
36
+ "conv_stride": [
37
+ 5,
38
+ 2,
39
+ 2,
40
+ 2,
41
+ 2,
42
+ 2,
43
+ 2
44
+ ],
45
+ "ctc_loss_reduction": "mean",
46
+ "ctc_zero_infinity": true,
47
+ "diversity_loss_weight": 0.1,
48
+ "do_stable_layer_norm": true,
49
+ "eos_token_id": 2,
50
+ "feat_extract_activation": "gelu",
51
+ "feat_extract_dropout": 0.0,
52
+ "feat_extract_norm": "layer",
53
+ "feat_proj_dropout": 0.04,
54
+ "feat_quantizer_dropout": 0.0,
55
+ "final_dropout": 0.0,
56
+ "hidden_act": "gelu",
57
+ "hidden_dropout": 0.047,
58
+ "hidden_size": 1024,
59
+ "initializer_range": 0.02,
60
+ "intermediate_size": 4096,
61
+ "layer_norm_eps": 1e-05,
62
+ "layerdrop": 0.041,
63
+ "mask_channel_length": 10,
64
+ "mask_channel_min_space": 1,
65
+ "mask_channel_other": 0.0,
66
+ "mask_channel_prob": 0.0,
67
+ "mask_channel_selection": "static",
68
+ "mask_feature_length": 64,
69
+ "mask_feature_min_masks": 0,
70
+ "mask_feature_prob": 0.25,
71
+ "mask_time_length": 10,
72
+ "mask_time_min_masks": 2,
73
+ "mask_time_min_space": 1,
74
+ "mask_time_other": 0.0,
75
+ "mask_time_prob": 0.082,
76
+ "mask_time_selection": "static",
77
+ "model_type": "wav2vec2",
78
+ "num_adapter_layers": 3,
79
+ "num_attention_heads": 16,
80
+ "num_codevector_groups": 2,
81
+ "num_codevectors_per_group": 320,
82
+ "num_conv_pos_embedding_groups": 16,
83
+ "num_conv_pos_embeddings": 128,
84
+ "num_feat_extract_layers": 7,
85
+ "num_hidden_layers": 24,
86
+ "num_negatives": 100,
87
+ "output_hidden_size": 1024,
88
+ "pad_token_id": 31,
89
+ "proj_codevector_dim": 768,
90
+ "tdnn_dilation": [
91
+ 1,
92
+ 2,
93
+ 3,
94
+ 1,
95
+ 1
96
+ ],
97
+ "tdnn_dim": [
98
+ 512,
99
+ 512,
100
+ 512,
101
+ 512,
102
+ 1500
103
+ ],
104
+ "tdnn_kernel": [
105
+ 5,
106
+ 3,
107
+ 3,
108
+ 1,
109
+ 1
110
+ ],
111
+ "torch_dtype": "float32",
112
+ "transformers_version": "4.38.1",
113
+ "use_weighted_layer_sum": false,
114
+ "vocab_size": 34,
115
+ "xvector_output_dim": 512
116
+ }
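The config above describes the fine-tuned checkpoint: a 24-layer, 1024-dimensional wav2vec2 encoder initialised from NbAiLab/nb-wav2vec2-300m-bokmaal, with a 34-symbol CTC head and [PAD] (id 31) as the blank. A hedged loading sketch, assuming the repository root is the working directory (a Hub repo id would work the same way):

```python
from transformers import Wav2Vec2Config, Wav2Vec2ForCTC

# "." stands for the repository root containing config.json and model.safetensors.
config = Wav2Vec2Config.from_pretrained(".")
model = Wav2Vec2ForCTC.from_pretrained(".")

print(config.num_hidden_layers, config.hidden_size, config.vocab_size)  # 24 1024 34
```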
language_model/5gram.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7b41c24c63f2f0585bea83666369593f3b3e6d047f327a90f36ebca2c35ef0ff
3
+ size 4243671427
language_model/attrs.json ADDED
@@ -0,0 +1 @@
1
+ {"alpha": 0.5, "beta": 0.1, "unk_score_offset": -10.0, "score_boundary": true}
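attrs.json stores the pyctcdecode beam-search parameters used together with the 5-gram KenLM model: alpha (language-model weight), beta (word-insertion bonus), the unknown-word penalty, and boundary scoring. A rough sketch of how such a decoder could be rebuilt by hand, assuming the repository root as working directory; in practice Wav2Vec2ProcessorWithLM.from_pretrained reconstructs it from the language_model/ folder automatically:

```python
from pyctcdecode import build_ctcdecoder
from transformers import Wav2Vec2CTCTokenizer

# Labels must be in CTC output-id order; the "|" word delimiter becomes a space.
tokenizer = Wav2Vec2CTCTokenizer.from_pretrained(".")
vocab = tokenizer.get_vocab()
labels = [tok for tok, _ in sorted(vocab.items(), key=lambda kv: kv[1])]
labels = [" " if tok == "|" else tok for tok in labels]

decoder = build_ctcdecoder(
    labels,
    kenlm_model_path="language_model/5gram.bin",
    alpha=0.5,               # LM weight, from attrs.json
    beta=0.1,                # word-insertion bonus, from attrs.json
    unk_score_offset=-10.0,
    lm_score_boundary=True,  # attrs.json key: "score_boundary"
)
```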
language_model/unigrams.txt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ac3e71ca49838ca355df6fdcb8d89344a5a9bf9e1a76587cdf5df1367c19b9a9
3
+ size 16759269
model.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fa5e89f40e056dff1b1d3f46fa4de904588460556c26f78f3f3db7fae7c5dc81
3
+ size 1261946880
preprocessor_config.json ADDED
@@ -0,0 +1,10 @@
1
+ {
2
+ "do_normalize": true,
3
+ "feature_extractor_type": "Wav2Vec2FeatureExtractor",
4
+ "feature_size": 1,
5
+ "padding_side": "right",
6
+ "padding_value": 0,
7
+ "processor_class": "Wav2Vec2ProcessorWithLM",
8
+ "return_attention_mask": true,
9
+ "sampling_rate": 16000
10
+ }
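preprocessor_config.json ties the pieces together: raw 16 kHz mono audio goes through a normalising Wav2Vec2FeatureExtractor (with attention mask), and decoding is delegated to Wav2Vec2ProcessorWithLM. A minimal inference sketch; the repository path and the silent dummy audio are assumptions, and pyctcdecode plus kenlm must be installed for the LM decoder to load:

```python
import numpy as np
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2ProcessorWithLM

processor = Wav2Vec2ProcessorWithLM.from_pretrained(".")
model = Wav2Vec2ForCTC.from_pretrained(".")

audio = np.zeros(16000, dtype=np.float32)   # stand-in for one second of real speech
inputs = processor(audio, sampling_rate=16000, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
print(processor.batch_decode(logits.numpy()).text)  # LM-rescored transcription(s)
```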
runs/May30_20-55-10_idun-04-08/events.out.tfevents.1717095672.idun-04-08.1435950.0 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3e49f19246523318cf60a0bd46f571c286c542e762b475b1555535083d8f0e25
3
+ size 59537
special_tokens_map.json ADDED
@@ -0,0 +1,6 @@
1
+ {
2
+ "bos_token": "<s>",
3
+ "eos_token": "</s>",
4
+ "pad_token": "[PAD]",
5
+ "unk_token": "[UNK]"
6
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,47 @@
1
+ {
2
+ "added_tokens_decoder": {
3
+ "30": {
4
+ "content": "[UNK]",
5
+ "lstrip": true,
6
+ "normalized": false,
7
+ "rstrip": true,
8
+ "single_word": false,
9
+ "special": false
10
+ },
11
+ "31": {
12
+ "content": "[PAD]",
13
+ "lstrip": true,
14
+ "normalized": false,
15
+ "rstrip": true,
16
+ "single_word": false,
17
+ "special": false
18
+ },
19
+ "32": {
20
+ "content": "<s>",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "33": {
28
+ "content": "</s>",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ }
35
+ },
36
+ "bos_token": "<s>",
37
+ "clean_up_tokenization_spaces": true,
38
+ "do_lower_case": false,
39
+ "eos_token": "</s>",
40
+ "model_max_length": 1000000000000000019884624838656,
41
+ "pad_token": "[PAD]",
42
+ "replace_word_delimiter_char": " ",
43
+ "target_lang": null,
44
+ "tokenizer_class": "Wav2Vec2CTCTokenizer",
45
+ "unk_token": "[UNK]",
46
+ "word_delimiter_token": "|"
47
+ }
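The tokenizer config above sets up a character-level Wav2Vec2CTCTokenizer: "|" is the word delimiter, [UNK] and [PAD] are plain added tokens, and <s>/</s> are special. For comparison with the LM-boosted path, a sketch of plain greedy (argmax) decoding, again assuming the repository root and dummy audio:

```python
import numpy as np
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor

processor = Wav2Vec2Processor.from_pretrained(".")   # feature extractor + CTC tokenizer only
model = Wav2Vec2ForCTC.from_pretrained(".")

audio = np.zeros(16000, dtype=np.float32)            # stand-in for real 16 kHz speech
inputs = processor(audio, sampling_rate=16000, return_tensors="pt")
with torch.no_grad():
    pred_ids = model(**inputs).logits.argmax(dim=-1)
print(processor.batch_decode(pred_ids))              # collapses repeats, drops [PAD], restores spaces
```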
training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4a7afd10c5b5a15d9300aae5934cd968ceb099ca1ed1f39f2f47a88cce68902c
3
+ size 4984
vocab.json ADDED
@@ -0,0 +1,34 @@
1
+ {
2
+ "[PAD]": 31,
3
+ "[UNK]": 30,
4
+ "a": 1,
5
+ "b": 2,
6
+ "c": 3,
7
+ "d": 4,
8
+ "e": 5,
9
+ "f": 6,
10
+ "g": 7,
11
+ "h": 8,
12
+ "i": 9,
13
+ "j": 10,
14
+ "k": 11,
15
+ "l": 12,
16
+ "m": 13,
17
+ "n": 14,
18
+ "o": 15,
19
+ "p": 16,
20
+ "q": 17,
21
+ "r": 18,
22
+ "s": 19,
23
+ "t": 20,
24
+ "u": 21,
25
+ "v": 22,
26
+ "w": 23,
27
+ "x": 24,
28
+ "y": 25,
29
+ "z": 26,
30
+ "|": 0,
31
+ "å": 27,
32
+ "æ": 28,
33
+ "ø": 29
34
+ }
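vocab.json maps the 32 base output symbols to ids: "|" (id 0) marks word boundaries, the lowercase letters plus å/æ/ø follow, [UNK] is 30 and [PAD] 31 serves as the CTC blank; the <s>/</s> tokens registered in the tokenizer config (ids 32/33) bring vocab_size to 34. A small sketch of the raw decoding rule this implies, using a hypothetical id sequence:

```python
import json

# Collapse repeated ids, drop the [PAD] blank, and map "|" back to a space.
with open("vocab.json") as f:
    id_to_char = {i: c for c, i in json.load(f).items()}

def ctc_collapse(ids, blank_id=31):
    out, prev = [], None
    for i in ids:
        if i != prev and i != blank_id:
            out.append(id_to_char[i])
        prev = i
    return "".join(out).replace("|", " ")

print(ctc_collapse([8, 8, 31, 5, 9, 0, 16, 27]))  # hypothetical ids -> "hei på"
```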