Tflatval committed
Commit 580f5b3
1 Parent(s): f15a9de

Upload folder using huggingface_hub
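The commit message says the folder was pushed with huggingface_hub. For reference, a minimal sketch of what such an upload typically looks like; the local folder path and repo id below are placeholders, not values taken from this commit.

```python
from huggingface_hub import HfApi

api = HfApi()

# Push every file in the local training-output folder as a single commit.
# folder_path and repo_id are placeholders; the real values are not part of this diff.
api.upload_folder(
    folder_path="NB-1b-19.06",
    repo_id="Tflatval/<repo-name>",
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```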
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ language_model/unigrams.txt filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,4 @@
+ {
+ "</s>": 33,
+ "<s>": 32
+ }
alphabet.json ADDED
@@ -0,0 +1 @@
+ {"labels": [" ", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", "\u00e5", "\u00e6", "\u00f8", "\u2047", "", "<s>", "</s>"], "is_bpe": false}
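alphabet.json pins the 34 CTC labels: the empty string (the CTC blank) sits at index 31, matching pad_token_id in the configs below, and <s>/</s> from added_tokens.json land at 32 and 33. A minimal sketch, assuming pyctcdecode is installed, of how such a label list is typically turned into a beam-search decoder; no language-model path is passed here because only language_model/unigrams.txt appears in this diff.

```python
import json

from pyctcdecode import build_ctcdecoder  # pyctcdecode is the decoder backend used by Wav2Vec2ProcessorWithLM

# Read the 34 labels committed in alphabet.json; "" at index 31 is the CTC blank.
with open("alphabet.json") as f:
    labels = json.load(f)["labels"]

# A real setup would also pass a KenLM ARPA/binary via kenlm_model_path;
# that file is not shown in this commit, so decode without it here.
decoder = build_ctcdecoder(labels, kenlm_model_path=None)
print(len(labels))  # 34
```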
checkpoint-3432/config.json ADDED
@@ -0,0 +1,108 @@
+ {
+ "_name_or_path": "NbAiLab/nb-wav2vec2-1b-bokmaal",
+ "activation_dropout": 0.055,
+ "adapter_attn_dim": null,
+ "adapter_kernel_size": 3,
+ "adapter_stride": 2,
+ "add_adapter": false,
+ "apply_spec_augment": true,
+ "architectures": [
+ "Wav2Vec2ForCTC"
+ ],
+ "attention_dropout": 0.094,
+ "bos_token_id": 1,
+ "classifier_proj_size": 256,
+ "codevector_dim": 1024,
+ "contrastive_logits_temperature": 0.1,
+ "conv_bias": true,
+ "conv_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512
+ ],
+ "conv_kernel": [
+ 10,
+ 3,
+ 3,
+ 3,
+ 3,
+ 2,
+ 2
+ ],
+ "conv_stride": [
+ 5,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2
+ ],
+ "ctc_loss_reduction": "mean",
+ "ctc_zero_infinity": true,
+ "diversity_loss_weight": 0.1,
+ "do_stable_layer_norm": true,
+ "eos_token_id": 2,
+ "feat_extract_activation": "gelu",
+ "feat_extract_dropout": 0.0,
+ "feat_extract_norm": "layer",
+ "feat_proj_dropout": 0.04,
+ "feat_quantizer_dropout": 0.0,
+ "final_dropout": 0.0,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.047,
+ "hidden_size": 1280,
+ "initializer_range": 0.02,
+ "intermediate_size": 5120,
+ "layer_norm_eps": 1e-05,
+ "layerdrop": 0.041,
+ "mask_feature_length": 64,
+ "mask_feature_min_masks": 0,
+ "mask_feature_prob": 0.25,
+ "mask_time_length": 10,
+ "mask_time_min_masks": 2,
+ "mask_time_prob": 0.082,
+ "model_type": "wav2vec2",
+ "num_adapter_layers": 3,
+ "num_attention_heads": 16,
+ "num_codevector_groups": 2,
+ "num_codevectors_per_group": 320,
+ "num_conv_pos_embedding_groups": 16,
+ "num_conv_pos_embeddings": 128,
+ "num_feat_extract_layers": 7,
+ "num_hidden_layers": 48,
+ "num_negatives": 100,
+ "output_hidden_size": 1280,
+ "pad_token_id": 31,
+ "proj_codevector_dim": 1024,
+ "tdnn_dilation": [
+ 1,
+ 2,
+ 3,
+ 1,
+ 1
+ ],
+ "tdnn_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 1500
+ ],
+ "tdnn_kernel": [
+ 5,
+ 3,
+ 3,
+ 1,
+ 1
+ ],
+ "torch_dtype": "float32",
+ "transformers_version": "4.38.1",
+ "use_weighted_layer_sum": false,
+ "vocab_size": 34,
+ "xvector_output_dim": 512
+ }
checkpoint-3432/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:20c77978e409c821f7a1bf59c121b9a47ae8192725878db239ae531f6db6ed82
+ size 3850265216
checkpoint-3432/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aee311fd1945a268b55e54f81b0e0190fedd3d2d336958e504e9b429125e8c61
+ size 7667307858
checkpoint-3432/preprocessor_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "do_normalize": true,
+ "feature_extractor_type": "Wav2Vec2FeatureExtractor",
+ "feature_size": 1,
+ "padding_side": "right",
+ "padding_value": 0,
+ "processor_class": "Wav2Vec2ProcessorWithLM",
+ "return_attention_mask": true,
+ "sampling_rate": 16000
+ }
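The preprocessor declares Wav2Vec2ProcessorWithLM and 16 kHz input, and the config names Wav2Vec2ForCTC. A minimal inference sketch under those assumptions; the repo id and audio file are placeholders, and torch/soundfile are assumed installed.

```python
import soundfile as sf
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2ProcessorWithLM

# Placeholder repo id; substitute the Hub repo this commit was pushed to.
repo_id = "Tflatval/<repo-name>"

processor = Wav2Vec2ProcessorWithLM.from_pretrained(repo_id)
model = Wav2Vec2ForCTC.from_pretrained(repo_id)

# preprocessor_config.json above expects mono 16 kHz audio and attention masks.
speech, sr = sf.read("sample.wav")
inputs = processor(speech, sampling_rate=16000, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits

# batch_decode runs the pyctcdecode beam search with the bundled language model.
print(processor.batch_decode(logits.numpy()).text)
```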
checkpoint-3432/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4ea77d55a21ce714851e38d4a47558146eb926deae3eb204a679dacc77173a4b
+ size 14308
checkpoint-3432/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c4c245983b3866e9acb963f3fb35b2b6cfbc4212737b99d4635f7e855c32ebb2
+ size 1064
checkpoint-3432/trainer_state.json ADDED
@@ -0,0 +1,1347 @@
1
+ {
2
+ "best_metric": 0.14607747395833334,
3
+ "best_model_checkpoint": "/cluster/home/torstefl/Master/saved_model/W2V/single/NB/NB-1b-19.06/checkpoint-3432",
4
+ "epoch": 39.0,
5
+ "eval_steps": 500,
6
+ "global_step": 3432,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 1.0,
13
+ "grad_norm": 10.972672462463379,
14
+ "learning_rate": 1.1600000000000001e-06,
15
+ "loss": 0.8305,
16
+ "step": 88
17
+ },
18
+ {
19
+ "epoch": 1.0,
20
+ "eval_rundkast_loss": 0.506217360496521,
21
+ "eval_rundkast_runtime": 31.8232,
22
+ "eval_rundkast_samples_per_second": 42.076,
23
+ "eval_rundkast_steps_per_second": 1.32,
24
+ "eval_rundkast_wer": 0.20532714765637497,
25
+ "step": 88
26
+ },
27
+ {
28
+ "epoch": 1.0,
29
+ "eval_nb_samtale_loss": 0.7159814238548279,
30
+ "eval_nb_samtale_runtime": 38.8295,
31
+ "eval_nb_samtale_samples_per_second": 13.727,
32
+ "eval_nb_samtale_steps_per_second": 0.438,
33
+ "eval_nb_samtale_wer": 0.20125325520833334,
34
+ "step": 88
35
+ },
36
+ {
37
+ "epoch": 1.0,
38
+ "eval_bigbrother_loss": 2.73382568359375,
39
+ "eval_bigbrother_runtime": 45.0269,
40
+ "eval_bigbrother_samples_per_second": 30.426,
41
+ "eval_bigbrother_steps_per_second": 0.955,
42
+ "eval_bigbrother_wer": 0.5745589301538709,
43
+ "step": 88
44
+ },
45
+ {
46
+ "epoch": 2.0,
47
+ "grad_norm": 6.6329345703125,
48
+ "learning_rate": 2.3333333333333336e-06,
49
+ "loss": 0.6423,
50
+ "step": 176
51
+ },
52
+ {
53
+ "epoch": 2.0,
54
+ "eval_rundkast_loss": 0.42037609219551086,
55
+ "eval_rundkast_runtime": 31.233,
56
+ "eval_rundkast_samples_per_second": 42.871,
57
+ "eval_rundkast_steps_per_second": 1.345,
58
+ "eval_rundkast_wer": 0.2020476723724204,
59
+ "step": 176
60
+ },
61
+ {
62
+ "epoch": 2.0,
63
+ "eval_nb_samtale_loss": 0.5762496590614319,
64
+ "eval_nb_samtale_runtime": 38.6529,
65
+ "eval_nb_samtale_samples_per_second": 13.789,
66
+ "eval_nb_samtale_steps_per_second": 0.44,
67
+ "eval_nb_samtale_wer": 0.19417317708333334,
68
+ "step": 176
69
+ },
70
+ {
71
+ "epoch": 2.0,
72
+ "eval_bigbrother_loss": 2.2738213539123535,
73
+ "eval_bigbrother_runtime": 44.7689,
74
+ "eval_bigbrother_samples_per_second": 30.602,
75
+ "eval_bigbrother_steps_per_second": 0.96,
76
+ "eval_bigbrother_wer": 0.5613469749456216,
77
+ "step": 176
78
+ },
79
+ {
80
+ "epoch": 3.0,
81
+ "grad_norm": 4.364439964294434,
82
+ "learning_rate": 3.5066666666666673e-06,
83
+ "loss": 0.5857,
84
+ "step": 264
85
+ },
86
+ {
87
+ "epoch": 3.0,
88
+ "eval_rundkast_loss": 0.41098639369010925,
89
+ "eval_rundkast_runtime": 31.3258,
90
+ "eval_rundkast_samples_per_second": 42.744,
91
+ "eval_rundkast_steps_per_second": 1.341,
92
+ "eval_rundkast_wer": 0.1977283634618461,
93
+ "step": 264
94
+ },
95
+ {
96
+ "epoch": 3.0,
97
+ "eval_nb_samtale_loss": 0.5280192494392395,
98
+ "eval_nb_samtale_runtime": 38.586,
99
+ "eval_nb_samtale_samples_per_second": 13.813,
100
+ "eval_nb_samtale_steps_per_second": 0.441,
101
+ "eval_nb_samtale_wer": 0.18359375,
102
+ "step": 264
103
+ },
104
+ {
105
+ "epoch": 3.0,
106
+ "eval_bigbrother_loss": 2.192491054534912,
107
+ "eval_bigbrother_runtime": 44.8015,
108
+ "eval_bigbrother_samples_per_second": 30.579,
109
+ "eval_bigbrother_steps_per_second": 0.96,
110
+ "eval_bigbrother_wer": 0.5548215580439861,
111
+ "step": 264
112
+ },
113
+ {
114
+ "epoch": 4.0,
115
+ "grad_norm": 5.418688774108887,
116
+ "learning_rate": 4.680000000000001e-06,
117
+ "loss": 0.5332,
118
+ "step": 352
119
+ },
120
+ {
121
+ "epoch": 4.0,
122
+ "eval_rundkast_loss": 0.3839390277862549,
123
+ "eval_rundkast_runtime": 31.2002,
124
+ "eval_rundkast_samples_per_second": 42.916,
125
+ "eval_rundkast_steps_per_second": 1.346,
126
+ "eval_rundkast_wer": 0.19900815869460886,
127
+ "step": 352
128
+ },
129
+ {
130
+ "epoch": 4.0,
131
+ "eval_nb_samtale_loss": 0.48143112659454346,
132
+ "eval_nb_samtale_runtime": 38.3454,
133
+ "eval_nb_samtale_samples_per_second": 13.9,
134
+ "eval_nb_samtale_steps_per_second": 0.443,
135
+ "eval_nb_samtale_wer": 0.179443359375,
136
+ "step": 352
137
+ },
138
+ {
139
+ "epoch": 4.0,
140
+ "eval_bigbrother_loss": 2.0169622898101807,
141
+ "eval_bigbrother_runtime": 44.7675,
142
+ "eval_bigbrother_samples_per_second": 30.603,
143
+ "eval_bigbrother_steps_per_second": 0.961,
144
+ "eval_bigbrother_wer": 0.5486183839523081,
145
+ "step": 352
146
+ },
147
+ {
148
+ "epoch": 5.0,
149
+ "grad_norm": 6.6992034912109375,
150
+ "learning_rate": 5.853333333333335e-06,
151
+ "loss": 0.5117,
152
+ "step": 440
153
+ },
154
+ {
155
+ "epoch": 5.0,
156
+ "eval_rundkast_loss": 0.38616734743118286,
157
+ "eval_rundkast_runtime": 31.1274,
158
+ "eval_rundkast_samples_per_second": 43.017,
159
+ "eval_rundkast_steps_per_second": 1.349,
160
+ "eval_rundkast_wer": 0.1986082226843705,
161
+ "step": 440
162
+ },
163
+ {
164
+ "epoch": 5.0,
165
+ "eval_nb_samtale_loss": 0.46892496943473816,
166
+ "eval_nb_samtale_runtime": 38.4436,
167
+ "eval_nb_samtale_samples_per_second": 13.864,
168
+ "eval_nb_samtale_steps_per_second": 0.442,
169
+ "eval_nb_samtale_wer": 0.17545572916666666,
170
+ "step": 440
171
+ },
172
+ {
173
+ "epoch": 5.0,
174
+ "eval_bigbrother_loss": 1.9676716327667236,
175
+ "eval_bigbrother_runtime": 44.7467,
176
+ "eval_bigbrother_samples_per_second": 30.617,
177
+ "eval_bigbrother_steps_per_second": 0.961,
178
+ "eval_bigbrother_wer": 0.5392733424635463,
179
+ "step": 440
180
+ },
181
+ {
182
+ "epoch": 6.0,
183
+ "grad_norm": 4.8291120529174805,
184
+ "learning_rate": 7.0266666666666674e-06,
185
+ "loss": 0.4809,
186
+ "step": 528
187
+ },
188
+ {
189
+ "epoch": 6.0,
190
+ "eval_rundkast_loss": 0.37009599804878235,
191
+ "eval_rundkast_runtime": 31.2555,
192
+ "eval_rundkast_samples_per_second": 42.84,
193
+ "eval_rundkast_steps_per_second": 1.344,
194
+ "eval_rundkast_wer": 0.19636858102703567,
195
+ "step": 528
196
+ },
197
+ {
198
+ "epoch": 6.0,
199
+ "eval_nb_samtale_loss": 0.4429633915424347,
200
+ "eval_nb_samtale_runtime": 38.3862,
201
+ "eval_nb_samtale_samples_per_second": 13.885,
202
+ "eval_nb_samtale_steps_per_second": 0.443,
203
+ "eval_nb_samtale_wer": 0.17342122395833334,
204
+ "step": 528
205
+ },
206
+ {
207
+ "epoch": 6.0,
208
+ "eval_bigbrother_loss": 1.9097399711608887,
209
+ "eval_bigbrother_runtime": 44.7043,
210
+ "eval_bigbrother_samples_per_second": 30.646,
211
+ "eval_bigbrother_steps_per_second": 0.962,
212
+ "eval_bigbrother_wer": 0.5354869894465479,
213
+ "step": 528
214
+ },
215
+ {
216
+ "epoch": 7.0,
217
+ "grad_norm": 6.0818376541137695,
218
+ "learning_rate": 8.2e-06,
219
+ "loss": 0.4485,
220
+ "step": 616
221
+ },
222
+ {
223
+ "epoch": 7.0,
224
+ "eval_rundkast_loss": 0.39074423909187317,
225
+ "eval_rundkast_runtime": 31.2655,
226
+ "eval_rundkast_samples_per_second": 42.827,
227
+ "eval_rundkast_steps_per_second": 1.343,
228
+ "eval_rundkast_wer": 0.19580867061270196,
229
+ "step": 616
230
+ },
231
+ {
232
+ "epoch": 7.0,
233
+ "eval_nb_samtale_loss": 0.44968363642692566,
234
+ "eval_nb_samtale_runtime": 38.4136,
235
+ "eval_nb_samtale_samples_per_second": 13.875,
236
+ "eval_nb_samtale_steps_per_second": 0.443,
237
+ "eval_nb_samtale_wer": 0.17179361979166666,
238
+ "step": 616
239
+ },
240
+ {
241
+ "epoch": 7.0,
242
+ "eval_bigbrother_loss": 2.002044439315796,
243
+ "eval_bigbrother_runtime": 44.9245,
244
+ "eval_bigbrother_samples_per_second": 30.496,
245
+ "eval_bigbrother_steps_per_second": 0.957,
246
+ "eval_bigbrother_wer": 0.5383871747361637,
247
+ "step": 616
248
+ },
249
+ {
250
+ "epoch": 8.0,
251
+ "grad_norm": 5.505432605743408,
252
+ "learning_rate": 9.373333333333334e-06,
253
+ "loss": 0.4364,
254
+ "step": 704
255
+ },
256
+ {
257
+ "epoch": 8.0,
258
+ "eval_rundkast_loss": 0.38471347093582153,
259
+ "eval_rundkast_runtime": 31.9089,
260
+ "eval_rundkast_samples_per_second": 41.963,
261
+ "eval_rundkast_steps_per_second": 1.316,
262
+ "eval_rundkast_wer": 0.1968485042393217,
263
+ "step": 704
264
+ },
265
+ {
266
+ "epoch": 8.0,
267
+ "eval_nb_samtale_loss": 0.45092540979385376,
268
+ "eval_nb_samtale_runtime": 38.457,
269
+ "eval_nb_samtale_samples_per_second": 13.86,
270
+ "eval_nb_samtale_steps_per_second": 0.442,
271
+ "eval_nb_samtale_wer": 0.16552734375,
272
+ "step": 704
273
+ },
274
+ {
275
+ "epoch": 8.0,
276
+ "eval_bigbrother_loss": 1.967365026473999,
277
+ "eval_bigbrother_runtime": 45.046,
278
+ "eval_bigbrother_samples_per_second": 30.413,
279
+ "eval_bigbrother_steps_per_second": 0.955,
280
+ "eval_bigbrother_wer": 0.5337146539917829,
281
+ "step": 704
282
+ },
283
+ {
284
+ "epoch": 9.0,
285
+ "grad_norm": 12.103546142578125,
286
+ "learning_rate": 1.0546666666666667e-05,
287
+ "loss": 0.4194,
288
+ "step": 792
289
+ },
290
+ {
291
+ "epoch": 9.0,
292
+ "eval_rundkast_loss": 0.37225037813186646,
293
+ "eval_rundkast_runtime": 31.1897,
294
+ "eval_rundkast_samples_per_second": 42.931,
295
+ "eval_rundkast_steps_per_second": 1.347,
296
+ "eval_rundkast_wer": 0.1936490161574148,
297
+ "step": 792
298
+ },
299
+ {
300
+ "epoch": 9.0,
301
+ "eval_nb_samtale_loss": 0.42615896463394165,
302
+ "eval_nb_samtale_runtime": 38.2289,
303
+ "eval_nb_samtale_samples_per_second": 13.942,
304
+ "eval_nb_samtale_steps_per_second": 0.445,
305
+ "eval_nb_samtale_wer": 0.164306640625,
306
+ "step": 792
307
+ },
308
+ {
309
+ "epoch": 9.0,
310
+ "eval_bigbrother_loss": 1.930216908454895,
311
+ "eval_bigbrother_runtime": 44.8984,
312
+ "eval_bigbrother_samples_per_second": 30.513,
313
+ "eval_bigbrother_steps_per_second": 0.958,
314
+ "eval_bigbrother_wer": 0.5332312897768469,
315
+ "step": 792
316
+ },
317
+ {
318
+ "epoch": 10.0,
319
+ "grad_norm": 8.539125442504883,
320
+ "learning_rate": 1.172e-05,
321
+ "loss": 0.4004,
322
+ "step": 880
323
+ },
324
+ {
325
+ "epoch": 10.0,
326
+ "eval_rundkast_loss": 0.3706592917442322,
327
+ "eval_rundkast_runtime": 31.2141,
328
+ "eval_rundkast_samples_per_second": 42.897,
329
+ "eval_rundkast_steps_per_second": 1.346,
330
+ "eval_rundkast_wer": 0.1950887857942729,
331
+ "step": 880
332
+ },
333
+ {
334
+ "epoch": 10.0,
335
+ "eval_nb_samtale_loss": 0.4074735939502716,
336
+ "eval_nb_samtale_runtime": 38.275,
337
+ "eval_nb_samtale_samples_per_second": 13.926,
338
+ "eval_nb_samtale_steps_per_second": 0.444,
339
+ "eval_nb_samtale_wer": 0.16300455729166666,
340
+ "step": 880
341
+ },
342
+ {
343
+ "epoch": 10.0,
344
+ "eval_bigbrother_loss": 1.9817578792572021,
345
+ "eval_bigbrother_runtime": 44.6517,
346
+ "eval_bigbrother_samples_per_second": 30.682,
347
+ "eval_bigbrother_steps_per_second": 0.963,
348
+ "eval_bigbrother_wer": 0.5371787641988238,
349
+ "step": 880
350
+ },
351
+ {
352
+ "epoch": 11.0,
353
+ "grad_norm": 4.635890007019043,
354
+ "learning_rate": 1.2893333333333336e-05,
355
+ "loss": 0.3842,
356
+ "step": 968
357
+ },
358
+ {
359
+ "epoch": 11.0,
360
+ "eval_rundkast_loss": 0.39748790860176086,
361
+ "eval_rundkast_runtime": 31.2552,
362
+ "eval_rundkast_samples_per_second": 42.841,
363
+ "eval_rundkast_steps_per_second": 1.344,
364
+ "eval_rundkast_wer": 0.19588865781474965,
365
+ "step": 968
366
+ },
367
+ {
368
+ "epoch": 11.0,
369
+ "eval_nb_samtale_loss": 0.4311392307281494,
370
+ "eval_nb_samtale_runtime": 38.4527,
371
+ "eval_nb_samtale_samples_per_second": 13.861,
372
+ "eval_nb_samtale_steps_per_second": 0.442,
373
+ "eval_nb_samtale_wer": 0.164794921875,
374
+ "step": 968
375
+ },
376
+ {
377
+ "epoch": 11.0,
378
+ "eval_bigbrother_loss": 1.9875516891479492,
379
+ "eval_bigbrother_runtime": 44.694,
380
+ "eval_bigbrother_samples_per_second": 30.653,
381
+ "eval_bigbrother_steps_per_second": 0.962,
382
+ "eval_bigbrother_wer": 0.5363731571739305,
383
+ "step": 968
384
+ },
385
+ {
386
+ "epoch": 12.0,
387
+ "grad_norm": 7.757190704345703,
388
+ "learning_rate": 1.4053333333333335e-05,
389
+ "loss": 0.3739,
390
+ "step": 1056
391
+ },
392
+ {
393
+ "epoch": 12.0,
394
+ "eval_rundkast_loss": 0.41941559314727783,
395
+ "eval_rundkast_runtime": 31.7689,
396
+ "eval_rundkast_samples_per_second": 42.148,
397
+ "eval_rundkast_steps_per_second": 1.322,
398
+ "eval_rundkast_wer": 0.191649336106223,
399
+ "step": 1056
400
+ },
401
+ {
402
+ "epoch": 12.0,
403
+ "eval_nb_samtale_loss": 0.4516970217227936,
404
+ "eval_nb_samtale_runtime": 38.6901,
405
+ "eval_nb_samtale_samples_per_second": 13.776,
406
+ "eval_nb_samtale_steps_per_second": 0.439,
407
+ "eval_nb_samtale_wer": 0.15885416666666666,
408
+ "step": 1056
409
+ },
410
+ {
411
+ "epoch": 12.0,
412
+ "eval_bigbrother_loss": 2.081986904144287,
413
+ "eval_bigbrother_runtime": 44.6453,
414
+ "eval_bigbrother_samples_per_second": 30.686,
415
+ "eval_bigbrother_steps_per_second": 0.963,
416
+ "eval_bigbrother_wer": 0.5287198904374446,
417
+ "step": 1056
418
+ },
419
+ {
420
+ "epoch": 13.0,
421
+ "grad_norm": 12.277324676513672,
422
+ "learning_rate": 1.5226666666666668e-05,
423
+ "loss": 0.3641,
424
+ "step": 1144
425
+ },
426
+ {
427
+ "epoch": 13.0,
428
+ "eval_rundkast_loss": 0.4138753414154053,
429
+ "eval_rundkast_runtime": 31.1718,
430
+ "eval_rundkast_samples_per_second": 42.956,
431
+ "eval_rundkast_steps_per_second": 1.347,
432
+ "eval_rundkast_wer": 0.19524876019836826,
433
+ "step": 1144
434
+ },
435
+ {
436
+ "epoch": 13.0,
437
+ "eval_nb_samtale_loss": 0.42660218477249146,
438
+ "eval_nb_samtale_runtime": 38.7065,
439
+ "eval_nb_samtale_samples_per_second": 13.77,
440
+ "eval_nb_samtale_steps_per_second": 0.439,
441
+ "eval_nb_samtale_wer": 0.16324869791666666,
442
+ "step": 1144
443
+ },
444
+ {
445
+ "epoch": 13.0,
446
+ "eval_bigbrother_loss": 2.037122964859009,
447
+ "eval_bigbrother_runtime": 45.1132,
448
+ "eval_bigbrother_samples_per_second": 30.368,
449
+ "eval_bigbrother_steps_per_second": 0.953,
450
+ "eval_bigbrother_wer": 0.5277531620075727,
451
+ "step": 1144
452
+ },
453
+ {
454
+ "epoch": 14.0,
455
+ "grad_norm": 6.313786506652832,
456
+ "learning_rate": 1.64e-05,
457
+ "loss": 0.3745,
458
+ "step": 1232
459
+ },
460
+ {
461
+ "epoch": 14.0,
462
+ "eval_rundkast_loss": 0.40043848752975464,
463
+ "eval_rundkast_runtime": 31.4205,
464
+ "eval_rundkast_samples_per_second": 42.616,
465
+ "eval_rundkast_steps_per_second": 1.337,
466
+ "eval_rundkast_wer": 0.196048632218845,
467
+ "step": 1232
468
+ },
469
+ {
470
+ "epoch": 14.0,
471
+ "eval_nb_samtale_loss": 0.43297895789146423,
472
+ "eval_nb_samtale_runtime": 38.2868,
473
+ "eval_nb_samtale_samples_per_second": 13.921,
474
+ "eval_nb_samtale_steps_per_second": 0.444,
475
+ "eval_nb_samtale_wer": 0.15926106770833334,
476
+ "step": 1232
477
+ },
478
+ {
479
+ "epoch": 14.0,
480
+ "eval_bigbrother_loss": 2.02278995513916,
481
+ "eval_bigbrother_runtime": 44.7867,
482
+ "eval_bigbrother_samples_per_second": 30.589,
483
+ "eval_bigbrother_steps_per_second": 0.96,
484
+ "eval_bigbrother_wer": 0.5295254974623379,
485
+ "step": 1232
486
+ },
487
+ {
488
+ "epoch": 15.0,
489
+ "grad_norm": 8.265440940856934,
490
+ "learning_rate": 1.756e-05,
491
+ "loss": 0.3399,
492
+ "step": 1320
493
+ },
494
+ {
495
+ "epoch": 15.0,
496
+ "eval_rundkast_loss": 0.4764655530452728,
497
+ "eval_rundkast_runtime": 31.1836,
498
+ "eval_rundkast_samples_per_second": 42.939,
499
+ "eval_rundkast_steps_per_second": 1.347,
500
+ "eval_rundkast_wer": 0.196528555431131,
501
+ "step": 1320
502
+ },
503
+ {
504
+ "epoch": 15.0,
505
+ "eval_nb_samtale_loss": 0.4471026062965393,
506
+ "eval_nb_samtale_runtime": 38.1832,
507
+ "eval_nb_samtale_samples_per_second": 13.959,
508
+ "eval_nb_samtale_steps_per_second": 0.445,
509
+ "eval_nb_samtale_wer": 0.15681966145833334,
510
+ "step": 1320
511
+ },
512
+ {
513
+ "epoch": 15.0,
514
+ "eval_bigbrother_loss": 2.3220856189727783,
515
+ "eval_bigbrother_runtime": 44.7848,
516
+ "eval_bigbrother_samples_per_second": 30.591,
517
+ "eval_bigbrother_steps_per_second": 0.96,
518
+ "eval_bigbrother_wer": 0.5332312897768469,
519
+ "step": 1320
520
+ },
521
+ {
522
+ "epoch": 16.0,
523
+ "grad_norm": 7.783019542694092,
524
+ "learning_rate": 1.8733333333333336e-05,
525
+ "loss": 0.3234,
526
+ "step": 1408
527
+ },
528
+ {
529
+ "epoch": 16.0,
530
+ "eval_rundkast_loss": 0.46145308017730713,
531
+ "eval_rundkast_runtime": 31.6896,
532
+ "eval_rundkast_samples_per_second": 42.254,
533
+ "eval_rundkast_steps_per_second": 1.325,
534
+ "eval_rundkast_wer": 0.19580867061270196,
535
+ "step": 1408
536
+ },
537
+ {
538
+ "epoch": 16.0,
539
+ "eval_nb_samtale_loss": 0.42990735173225403,
540
+ "eval_nb_samtale_runtime": 38.25,
541
+ "eval_nb_samtale_samples_per_second": 13.935,
542
+ "eval_nb_samtale_steps_per_second": 0.444,
543
+ "eval_nb_samtale_wer": 0.156494140625,
544
+ "step": 1408
545
+ },
546
+ {
547
+ "epoch": 16.0,
548
+ "eval_bigbrother_loss": 2.225994110107422,
549
+ "eval_bigbrother_runtime": 44.8274,
550
+ "eval_bigbrother_samples_per_second": 30.562,
551
+ "eval_bigbrother_steps_per_second": 0.959,
552
+ "eval_bigbrother_wer": 0.5260613872552968,
553
+ "step": 1408
554
+ },
555
+ {
556
+ "epoch": 17.0,
557
+ "grad_norm": 4.970972537994385,
558
+ "learning_rate": 1.9906666666666667e-05,
559
+ "loss": 0.3201,
560
+ "step": 1496
561
+ },
562
+ {
563
+ "epoch": 17.0,
564
+ "eval_rundkast_loss": 0.4546896815299988,
565
+ "eval_rundkast_runtime": 31.3176,
566
+ "eval_rundkast_samples_per_second": 42.756,
567
+ "eval_rundkast_steps_per_second": 1.341,
568
+ "eval_rundkast_wer": 0.19300911854103345,
569
+ "step": 1496
570
+ },
571
+ {
572
+ "epoch": 17.0,
573
+ "eval_nb_samtale_loss": 0.4340006113052368,
574
+ "eval_nb_samtale_runtime": 38.2709,
575
+ "eval_nb_samtale_samples_per_second": 13.927,
576
+ "eval_nb_samtale_steps_per_second": 0.444,
577
+ "eval_nb_samtale_wer": 0.15616861979166666,
578
+ "step": 1496
579
+ },
580
+ {
581
+ "epoch": 17.0,
582
+ "eval_bigbrother_loss": 2.141953945159912,
583
+ "eval_bigbrother_runtime": 44.7194,
584
+ "eval_bigbrother_samples_per_second": 30.635,
585
+ "eval_bigbrother_steps_per_second": 0.962,
586
+ "eval_bigbrother_wer": 0.5173608313864497,
587
+ "step": 1496
588
+ },
589
+ {
590
+ "epoch": 18.0,
591
+ "grad_norm": 4.4445576667785645,
592
+ "learning_rate": 1.91980198019802e-05,
593
+ "loss": 0.3069,
594
+ "step": 1584
595
+ },
596
+ {
597
+ "epoch": 18.0,
598
+ "eval_rundkast_loss": 0.47925588488578796,
599
+ "eval_rundkast_runtime": 31.2291,
600
+ "eval_rundkast_samples_per_second": 42.877,
601
+ "eval_rundkast_steps_per_second": 1.345,
602
+ "eval_rundkast_wer": 0.19676851703727405,
603
+ "step": 1584
604
+ },
605
+ {
606
+ "epoch": 18.0,
607
+ "eval_nb_samtale_loss": 0.4501398503780365,
608
+ "eval_nb_samtale_runtime": 38.2766,
609
+ "eval_nb_samtale_samples_per_second": 13.925,
610
+ "eval_nb_samtale_steps_per_second": 0.444,
611
+ "eval_nb_samtale_wer": 0.158203125,
612
+ "step": 1584
613
+ },
614
+ {
615
+ "epoch": 18.0,
616
+ "eval_bigbrother_loss": 2.33626651763916,
617
+ "eval_bigbrother_runtime": 44.7536,
618
+ "eval_bigbrother_samples_per_second": 30.612,
619
+ "eval_bigbrother_steps_per_second": 0.961,
620
+ "eval_bigbrother_wer": 0.5310561508096351,
621
+ "step": 1584
622
+ },
623
+ {
624
+ "epoch": 19.0,
625
+ "grad_norm": 4.55182409286499,
626
+ "learning_rate": 1.832673267326733e-05,
627
+ "loss": 0.2912,
628
+ "step": 1672
629
+ },
630
+ {
631
+ "epoch": 19.0,
632
+ "eval_rundkast_loss": 0.4635383188724518,
633
+ "eval_rundkast_runtime": 31.6064,
634
+ "eval_rundkast_samples_per_second": 42.365,
635
+ "eval_rundkast_steps_per_second": 1.329,
636
+ "eval_rundkast_wer": 0.20028795392737161,
637
+ "step": 1672
638
+ },
639
+ {
640
+ "epoch": 19.0,
641
+ "eval_nb_samtale_loss": 0.4135359227657318,
642
+ "eval_nb_samtale_runtime": 38.4073,
643
+ "eval_nb_samtale_samples_per_second": 13.878,
644
+ "eval_nb_samtale_steps_per_second": 0.443,
645
+ "eval_nb_samtale_wer": 0.156494140625,
646
+ "step": 1672
647
+ },
648
+ {
649
+ "epoch": 19.0,
650
+ "eval_bigbrother_loss": 2.3355190753936768,
651
+ "eval_bigbrother_runtime": 44.9365,
652
+ "eval_bigbrother_samples_per_second": 30.487,
653
+ "eval_bigbrother_steps_per_second": 0.957,
654
+ "eval_bigbrother_wer": 0.5369370820913558,
655
+ "step": 1672
656
+ },
657
+ {
658
+ "epoch": 20.0,
659
+ "grad_norm": 4.11824893951416,
660
+ "learning_rate": 1.7455445544554458e-05,
661
+ "loss": 0.2824,
662
+ "step": 1760
663
+ },
664
+ {
665
+ "epoch": 20.0,
666
+ "eval_rundkast_loss": 0.5276007652282715,
667
+ "eval_rundkast_runtime": 31.3512,
668
+ "eval_rundkast_samples_per_second": 42.71,
669
+ "eval_rundkast_steps_per_second": 1.34,
670
+ "eval_rundkast_wer": 0.196048632218845,
671
+ "step": 1760
672
+ },
673
+ {
674
+ "epoch": 20.0,
675
+ "eval_nb_samtale_loss": 0.47676244378089905,
676
+ "eval_nb_samtale_runtime": 38.3319,
677
+ "eval_nb_samtale_samples_per_second": 13.905,
678
+ "eval_nb_samtale_steps_per_second": 0.443,
679
+ "eval_nb_samtale_wer": 0.15804036458333334,
680
+ "step": 1760
681
+ },
682
+ {
683
+ "epoch": 20.0,
684
+ "eval_bigbrother_loss": 2.57776141166687,
685
+ "eval_bigbrother_runtime": 44.7907,
686
+ "eval_bigbrother_samples_per_second": 30.587,
687
+ "eval_bigbrother_steps_per_second": 0.96,
688
+ "eval_bigbrother_wer": 0.5436236203979699,
689
+ "step": 1760
690
+ },
691
+ {
692
+ "epoch": 21.0,
693
+ "grad_norm": 4.462925434112549,
694
+ "learning_rate": 1.6584158415841584e-05,
695
+ "loss": 0.2661,
696
+ "step": 1848
697
+ },
698
+ {
699
+ "epoch": 21.0,
700
+ "eval_rundkast_loss": 0.5087462067604065,
701
+ "eval_rundkast_runtime": 31.2914,
702
+ "eval_rundkast_samples_per_second": 42.791,
703
+ "eval_rundkast_steps_per_second": 1.342,
704
+ "eval_rundkast_wer": 0.19732842745160775,
705
+ "step": 1848
706
+ },
707
+ {
708
+ "epoch": 21.0,
709
+ "eval_nb_samtale_loss": 0.44534921646118164,
710
+ "eval_nb_samtale_runtime": 38.226,
711
+ "eval_nb_samtale_samples_per_second": 13.943,
712
+ "eval_nb_samtale_steps_per_second": 0.445,
713
+ "eval_nb_samtale_wer": 0.15413411458333334,
714
+ "step": 1848
715
+ },
716
+ {
717
+ "epoch": 21.0,
718
+ "eval_bigbrother_loss": 2.2979142665863037,
719
+ "eval_bigbrother_runtime": 44.8351,
720
+ "eval_bigbrother_samples_per_second": 30.556,
721
+ "eval_bigbrother_steps_per_second": 0.959,
722
+ "eval_bigbrother_wer": 0.5297671795698059,
723
+ "step": 1848
724
+ },
725
+ {
726
+ "epoch": 22.0,
727
+ "grad_norm": 3.7362682819366455,
728
+ "learning_rate": 1.5712871287128716e-05,
729
+ "loss": 0.2543,
730
+ "step": 1936
731
+ },
732
+ {
733
+ "epoch": 22.0,
734
+ "eval_rundkast_loss": 0.5440049171447754,
735
+ "eval_rundkast_runtime": 31.2654,
736
+ "eval_rundkast_samples_per_second": 42.827,
737
+ "eval_rundkast_steps_per_second": 1.343,
738
+ "eval_rundkast_wer": 0.19756838905775076,
739
+ "step": 1936
740
+ },
741
+ {
742
+ "epoch": 22.0,
743
+ "eval_nb_samtale_loss": 0.486325204372406,
744
+ "eval_nb_samtale_runtime": 38.3702,
745
+ "eval_nb_samtale_samples_per_second": 13.891,
746
+ "eval_nb_samtale_steps_per_second": 0.443,
747
+ "eval_nb_samtale_wer": 0.15763346354166666,
748
+ "step": 1936
749
+ },
750
+ {
751
+ "epoch": 22.0,
752
+ "eval_bigbrother_loss": 2.4817354679107666,
753
+ "eval_bigbrother_runtime": 44.7425,
754
+ "eval_bigbrother_samples_per_second": 30.62,
755
+ "eval_bigbrother_steps_per_second": 0.961,
756
+ "eval_bigbrother_wer": 0.5300088616772738,
757
+ "step": 1936
758
+ },
759
+ {
760
+ "epoch": 23.0,
761
+ "grad_norm": 3.388705015182495,
762
+ "learning_rate": 1.4841584158415842e-05,
763
+ "loss": 0.2522,
764
+ "step": 2024
765
+ },
766
+ {
767
+ "epoch": 23.0,
768
+ "eval_rundkast_loss": 0.5401936173439026,
769
+ "eval_rundkast_runtime": 31.3567,
770
+ "eval_rundkast_samples_per_second": 42.702,
771
+ "eval_rundkast_steps_per_second": 1.339,
772
+ "eval_rundkast_wer": 0.19380899056151016,
773
+ "step": 2024
774
+ },
775
+ {
776
+ "epoch": 23.0,
777
+ "eval_nb_samtale_loss": 0.45578888058662415,
778
+ "eval_nb_samtale_runtime": 38.2879,
779
+ "eval_nb_samtale_samples_per_second": 13.921,
780
+ "eval_nb_samtale_steps_per_second": 0.444,
781
+ "eval_nb_samtale_wer": 0.15315755208333334,
782
+ "step": 2024
783
+ },
784
+ {
785
+ "epoch": 23.0,
786
+ "eval_bigbrother_loss": 2.4559385776519775,
787
+ "eval_bigbrother_runtime": 44.8922,
788
+ "eval_bigbrother_samples_per_second": 30.518,
789
+ "eval_bigbrother_steps_per_second": 0.958,
790
+ "eval_bigbrother_wer": 0.5254169016353822,
791
+ "step": 2024
792
+ },
793
+ {
794
+ "epoch": 24.0,
795
+ "grad_norm": 4.662117004394531,
796
+ "learning_rate": 1.3970297029702971e-05,
797
+ "loss": 0.2544,
798
+ "step": 2112
799
+ },
800
+ {
801
+ "epoch": 24.0,
802
+ "eval_rundkast_loss": 0.5467772483825684,
803
+ "eval_rundkast_runtime": 31.3965,
804
+ "eval_rundkast_samples_per_second": 42.648,
805
+ "eval_rundkast_steps_per_second": 1.338,
806
+ "eval_rundkast_wer": 0.1951687729963206,
807
+ "step": 2112
808
+ },
809
+ {
810
+ "epoch": 24.0,
811
+ "eval_nb_samtale_loss": 0.45863205194473267,
812
+ "eval_nb_samtale_runtime": 38.2548,
813
+ "eval_nb_samtale_samples_per_second": 13.933,
814
+ "eval_nb_samtale_steps_per_second": 0.444,
815
+ "eval_nb_samtale_wer": 0.151123046875,
816
+ "step": 2112
817
+ },
818
+ {
819
+ "epoch": 24.0,
820
+ "eval_bigbrother_loss": 2.302259922027588,
821
+ "eval_bigbrother_runtime": 44.9057,
822
+ "eval_bigbrother_samples_per_second": 30.508,
823
+ "eval_bigbrother_steps_per_second": 0.958,
824
+ "eval_bigbrother_wer": 0.5301699830822525,
825
+ "step": 2112
826
+ },
827
+ {
828
+ "epoch": 25.0,
829
+ "grad_norm": 2.7924931049346924,
830
+ "learning_rate": 1.30990099009901e-05,
831
+ "loss": 0.2445,
832
+ "step": 2200
833
+ },
834
+ {
835
+ "epoch": 25.0,
836
+ "eval_rundkast_loss": 0.5389044880867004,
837
+ "eval_rundkast_runtime": 31.3233,
838
+ "eval_rundkast_samples_per_second": 42.748,
839
+ "eval_rundkast_steps_per_second": 1.341,
840
+ "eval_rundkast_wer": 0.19500879859222525,
841
+ "step": 2200
842
+ },
843
+ {
844
+ "epoch": 25.0,
845
+ "eval_nb_samtale_loss": 0.4871143698692322,
846
+ "eval_nb_samtale_runtime": 38.3162,
847
+ "eval_nb_samtale_samples_per_second": 13.911,
848
+ "eval_nb_samtale_steps_per_second": 0.444,
849
+ "eval_nb_samtale_wer": 0.15299479166666666,
850
+ "step": 2200
851
+ },
852
+ {
853
+ "epoch": 25.0,
854
+ "eval_bigbrother_loss": 2.405670166015625,
855
+ "eval_bigbrother_runtime": 44.8162,
856
+ "eval_bigbrother_samples_per_second": 30.569,
857
+ "eval_bigbrother_steps_per_second": 0.959,
858
+ "eval_bigbrother_wer": 0.5264641907677435,
859
+ "step": 2200
860
+ },
861
+ {
862
+ "epoch": 26.0,
863
+ "grad_norm": 5.481632232666016,
864
+ "learning_rate": 1.2227722772277228e-05,
865
+ "loss": 0.2255,
866
+ "step": 2288
867
+ },
868
+ {
869
+ "epoch": 26.0,
870
+ "eval_rundkast_loss": 0.5802582502365112,
871
+ "eval_rundkast_runtime": 31.3722,
872
+ "eval_rundkast_samples_per_second": 42.681,
873
+ "eval_rundkast_steps_per_second": 1.339,
874
+ "eval_rundkast_wer": 0.19820828667413215,
875
+ "step": 2288
876
+ },
877
+ {
878
+ "epoch": 26.0,
879
+ "eval_nb_samtale_loss": 0.5078674554824829,
880
+ "eval_nb_samtale_runtime": 38.1701,
881
+ "eval_nb_samtale_samples_per_second": 13.964,
882
+ "eval_nb_samtale_steps_per_second": 0.445,
883
+ "eval_nb_samtale_wer": 0.1533203125,
884
+ "step": 2288
885
+ },
886
+ {
887
+ "epoch": 26.0,
888
+ "eval_bigbrother_loss": 2.5950069427490234,
889
+ "eval_bigbrother_runtime": 45.0629,
890
+ "eval_bigbrother_samples_per_second": 30.402,
891
+ "eval_bigbrother_steps_per_second": 0.954,
892
+ "eval_bigbrother_wer": 0.5358897929589946,
893
+ "step": 2288
894
+ },
895
+ {
896
+ "epoch": 27.0,
897
+ "grad_norm": 6.247470855712891,
898
+ "learning_rate": 1.1356435643564357e-05,
899
+ "loss": 0.2317,
900
+ "step": 2376
901
+ },
902
+ {
903
+ "epoch": 27.0,
904
+ "eval_rundkast_loss": 0.5534220337867737,
905
+ "eval_rundkast_runtime": 31.8039,
906
+ "eval_rundkast_samples_per_second": 42.102,
907
+ "eval_rundkast_steps_per_second": 1.321,
908
+ "eval_rundkast_wer": 0.19908814589665655,
909
+ "step": 2376
910
+ },
911
+ {
912
+ "epoch": 27.0,
913
+ "eval_nb_samtale_loss": 0.46262821555137634,
914
+ "eval_nb_samtale_runtime": 38.5085,
915
+ "eval_nb_samtale_samples_per_second": 13.841,
916
+ "eval_nb_samtale_steps_per_second": 0.441,
917
+ "eval_nb_samtale_wer": 0.15234375,
918
+ "step": 2376
919
+ },
920
+ {
921
+ "epoch": 27.0,
922
+ "eval_bigbrother_loss": 2.396604537963867,
923
+ "eval_bigbrother_runtime": 45.0988,
924
+ "eval_bigbrother_samples_per_second": 30.378,
925
+ "eval_bigbrother_steps_per_second": 0.953,
926
+ "eval_bigbrother_wer": 0.5242084910980424,
927
+ "step": 2376
928
+ },
929
+ {
930
+ "epoch": 28.0,
931
+ "grad_norm": 5.388619899749756,
932
+ "learning_rate": 1.0485148514851486e-05,
933
+ "loss": 0.2157,
934
+ "step": 2464
935
+ },
936
+ {
937
+ "epoch": 28.0,
938
+ "eval_rundkast_loss": 0.565626859664917,
939
+ "eval_rundkast_runtime": 31.3606,
940
+ "eval_rundkast_samples_per_second": 42.697,
941
+ "eval_rundkast_steps_per_second": 1.339,
942
+ "eval_rundkast_wer": 0.1988481842905135,
943
+ "step": 2464
944
+ },
945
+ {
946
+ "epoch": 28.0,
947
+ "eval_nb_samtale_loss": 0.4518119990825653,
948
+ "eval_nb_samtale_runtime": 38.2392,
949
+ "eval_nb_samtale_samples_per_second": 13.939,
950
+ "eval_nb_samtale_steps_per_second": 0.445,
951
+ "eval_nb_samtale_wer": 0.15185546875,
952
+ "step": 2464
953
+ },
954
+ {
955
+ "epoch": 28.0,
956
+ "eval_bigbrother_loss": 2.401461601257324,
957
+ "eval_bigbrother_runtime": 44.8671,
958
+ "eval_bigbrother_samples_per_second": 30.535,
959
+ "eval_bigbrother_steps_per_second": 0.958,
960
+ "eval_bigbrother_wer": 0.5246918553129784,
961
+ "step": 2464
962
+ },
963
+ {
964
+ "epoch": 29.0,
965
+ "grad_norm": 6.6439738273620605,
966
+ "learning_rate": 9.613861386138615e-06,
967
+ "loss": 0.22,
968
+ "step": 2552
969
+ },
970
+ {
971
+ "epoch": 29.0,
972
+ "eval_rundkast_loss": 0.5526180267333984,
973
+ "eval_rundkast_runtime": 31.4809,
974
+ "eval_rundkast_samples_per_second": 42.534,
975
+ "eval_rundkast_steps_per_second": 1.334,
976
+ "eval_rundkast_wer": 0.19996800511918092,
977
+ "step": 2552
978
+ },
979
+ {
980
+ "epoch": 29.0,
981
+ "eval_nb_samtale_loss": 0.4727042317390442,
982
+ "eval_nb_samtale_runtime": 38.2477,
983
+ "eval_nb_samtale_samples_per_second": 13.935,
984
+ "eval_nb_samtale_steps_per_second": 0.444,
985
+ "eval_nb_samtale_wer": 0.15364583333333334,
986
+ "step": 2552
987
+ },
988
+ {
989
+ "epoch": 29.0,
990
+ "eval_bigbrother_loss": 2.432448387145996,
991
+ "eval_bigbrother_runtime": 45.1692,
992
+ "eval_bigbrother_samples_per_second": 30.33,
993
+ "eval_bigbrother_steps_per_second": 0.952,
994
+ "eval_bigbrother_wer": 0.5283976476274873,
995
+ "step": 2552
996
+ },
997
+ {
998
+ "epoch": 30.0,
999
+ "grad_norm": 6.641559600830078,
1000
+ "learning_rate": 8.742574257425743e-06,
1001
+ "loss": 0.2142,
1002
+ "step": 2640
1003
+ },
1004
+ {
1005
+ "epoch": 30.0,
1006
+ "eval_rundkast_loss": 0.5641056299209595,
1007
+ "eval_rundkast_runtime": 31.6944,
1008
+ "eval_rundkast_samples_per_second": 42.247,
1009
+ "eval_rundkast_steps_per_second": 1.325,
1010
+ "eval_rundkast_wer": 0.19580867061270196,
1011
+ "step": 2640
1012
+ },
1013
+ {
1014
+ "epoch": 30.0,
1015
+ "eval_nb_samtale_loss": 0.48357564210891724,
1016
+ "eval_nb_samtale_runtime": 38.4852,
1017
+ "eval_nb_samtale_samples_per_second": 13.849,
1018
+ "eval_nb_samtale_steps_per_second": 0.442,
1019
+ "eval_nb_samtale_wer": 0.14640299479166666,
1020
+ "step": 2640
1021
+ },
1022
+ {
1023
+ "epoch": 30.0,
1024
+ "eval_bigbrother_loss": 2.3856699466705322,
1025
+ "eval_bigbrother_runtime": 44.9444,
1026
+ "eval_bigbrother_samples_per_second": 30.482,
1027
+ "eval_bigbrother_steps_per_second": 0.957,
1028
+ "eval_bigbrother_wer": 0.52090550229598,
1029
+ "step": 2640
1030
+ },
1031
+ {
1032
+ "epoch": 31.0,
1033
+ "grad_norm": 5.838632106781006,
1034
+ "learning_rate": 7.871287128712872e-06,
1035
+ "loss": 0.1948,
1036
+ "step": 2728
1037
+ },
1038
+ {
1039
+ "epoch": 31.0,
1040
+ "eval_rundkast_loss": 0.5922185778617859,
1041
+ "eval_rundkast_runtime": 31.5341,
1042
+ "eval_rundkast_samples_per_second": 42.462,
1043
+ "eval_rundkast_steps_per_second": 1.332,
1044
+ "eval_rundkast_wer": 0.19708846584546472,
1045
+ "step": 2728
1046
+ },
1047
+ {
1048
+ "epoch": 31.0,
1049
+ "eval_nb_samtale_loss": 0.5342143774032593,
1050
+ "eval_nb_samtale_runtime": 38.0861,
1051
+ "eval_nb_samtale_samples_per_second": 13.995,
1052
+ "eval_nb_samtale_steps_per_second": 0.446,
1053
+ "eval_nb_samtale_wer": 0.14860026041666666,
1054
+ "step": 2728
1055
+ },
1056
+ {
1057
+ "epoch": 31.0,
1058
+ "eval_bigbrother_loss": 2.5876622200012207,
1059
+ "eval_bigbrother_runtime": 44.9279,
1060
+ "eval_bigbrother_samples_per_second": 30.493,
1061
+ "eval_bigbrother_steps_per_second": 0.957,
1062
+ "eval_bigbrother_wer": 0.5268669942801901,
1063
+ "step": 2728
1064
+ },
1065
+ {
1066
+ "epoch": 32.0,
1067
+ "grad_norm": 7.094357967376709,
1068
+ "learning_rate": 7e-06,
1069
+ "loss": 0.1949,
1070
+ "step": 2816
1071
+ },
1072
+ {
1073
+ "epoch": 32.0,
1074
+ "eval_rundkast_loss": 0.5746641159057617,
1075
+ "eval_rundkast_runtime": 31.5695,
1076
+ "eval_rundkast_samples_per_second": 42.414,
1077
+ "eval_rundkast_steps_per_second": 1.33,
1078
+ "eval_rundkast_wer": 0.19524876019836826,
1079
+ "step": 2816
1080
+ },
1081
+ {
1082
+ "epoch": 32.0,
1083
+ "eval_nb_samtale_loss": 0.5246254801750183,
1084
+ "eval_nb_samtale_runtime": 38.4186,
1085
+ "eval_nb_samtale_samples_per_second": 13.873,
1086
+ "eval_nb_samtale_steps_per_second": 0.442,
1087
+ "eval_nb_samtale_wer": 0.15120442708333334,
1088
+ "step": 2816
1089
+ },
1090
+ {
1091
+ "epoch": 32.0,
1092
+ "eval_bigbrother_loss": 2.5409083366394043,
1093
+ "eval_bigbrother_runtime": 45.1035,
1094
+ "eval_bigbrother_samples_per_second": 30.375,
1095
+ "eval_bigbrother_steps_per_second": 0.953,
1096
+ "eval_bigbrother_wer": 0.5232417626681705,
1097
+ "step": 2816
1098
+ },
1099
+ {
1100
+ "epoch": 33.0,
1101
+ "grad_norm": 3.003549098968506,
1102
+ "learning_rate": 6.128712871287129e-06,
1103
+ "loss": 0.204,
1104
+ "step": 2904
1105
+ },
1106
+ {
1107
+ "epoch": 33.0,
1108
+ "eval_rundkast_loss": 0.5838789939880371,
1109
+ "eval_rundkast_runtime": 31.6764,
1110
+ "eval_rundkast_samples_per_second": 42.271,
1111
+ "eval_rundkast_steps_per_second": 1.326,
1112
+ "eval_rundkast_wer": 0.19460886258198687,
1113
+ "step": 2904
1114
+ },
1115
+ {
1116
+ "epoch": 33.0,
1117
+ "eval_nb_samtale_loss": 0.5083252787590027,
1118
+ "eval_nb_samtale_runtime": 38.236,
1119
+ "eval_nb_samtale_samples_per_second": 13.94,
1120
+ "eval_nb_samtale_steps_per_second": 0.445,
1121
+ "eval_nb_samtale_wer": 0.14876302083333334,
1122
+ "step": 2904
1123
+ },
1124
+ {
1125
+ "epoch": 33.0,
1126
+ "eval_bigbrother_loss": 2.4765820503234863,
1127
+ "eval_bigbrother_runtime": 45.0387,
1128
+ "eval_bigbrother_samples_per_second": 30.418,
1129
+ "eval_bigbrother_steps_per_second": 0.955,
1130
+ "eval_bigbrother_wer": 0.5217111093208733,
1131
+ "step": 2904
1132
+ },
1133
+ {
1134
+ "epoch": 34.0,
1135
+ "grad_norm": 13.520666122436523,
1136
+ "learning_rate": 5.257425742574258e-06,
1137
+ "loss": 0.1946,
1138
+ "step": 2992
1139
+ },
1140
+ {
1141
+ "epoch": 34.0,
1142
+ "eval_rundkast_loss": 0.5890854597091675,
1143
+ "eval_rundkast_runtime": 32.1786,
1144
+ "eval_rundkast_samples_per_second": 41.611,
1145
+ "eval_rundkast_steps_per_second": 1.305,
1146
+ "eval_rundkast_wer": 0.19380899056151016,
1147
+ "step": 2992
1148
+ },
1149
+ {
1150
+ "epoch": 34.0,
1151
+ "eval_nb_samtale_loss": 0.5050138235092163,
1152
+ "eval_nb_samtale_runtime": 38.4424,
1153
+ "eval_nb_samtale_samples_per_second": 13.865,
1154
+ "eval_nb_samtale_steps_per_second": 0.442,
1155
+ "eval_nb_samtale_wer": 0.14876302083333334,
1156
+ "step": 2992
1157
+ },
1158
+ {
1159
+ "epoch": 34.0,
1160
+ "eval_bigbrother_loss": 2.5397651195526123,
1161
+ "eval_bigbrother_runtime": 46.0852,
1162
+ "eval_bigbrother_samples_per_second": 29.728,
1163
+ "eval_bigbrother_steps_per_second": 0.933,
1164
+ "eval_bigbrother_wer": 0.5208249415934907,
1165
+ "step": 2992
1166
+ },
1167
+ {
1168
+ "epoch": 35.0,
1169
+ "grad_norm": 13.422295570373535,
1170
+ "learning_rate": 4.386138613861386e-06,
1171
+ "loss": 0.1953,
1172
+ "step": 3080
1173
+ },
1174
+ {
1175
+ "epoch": 35.0,
1176
+ "eval_rundkast_loss": 0.5813275575637817,
1177
+ "eval_rundkast_runtime": 31.8014,
1178
+ "eval_rundkast_samples_per_second": 42.105,
1179
+ "eval_rundkast_steps_per_second": 1.321,
1180
+ "eval_rundkast_wer": 0.19404895216765317,
1181
+ "step": 3080
1182
+ },
1183
+ {
1184
+ "epoch": 35.0,
1185
+ "eval_nb_samtale_loss": 0.5057322382926941,
1186
+ "eval_nb_samtale_runtime": 38.3631,
1187
+ "eval_nb_samtale_samples_per_second": 13.894,
1188
+ "eval_nb_samtale_steps_per_second": 0.443,
1189
+ "eval_nb_samtale_wer": 0.14925130208333334,
1190
+ "step": 3080
1191
+ },
1192
+ {
1193
+ "epoch": 35.0,
1194
+ "eval_bigbrother_loss": 2.4785802364349365,
1195
+ "eval_bigbrother_runtime": 45.4371,
1196
+ "eval_bigbrother_samples_per_second": 30.152,
1197
+ "eval_bigbrother_steps_per_second": 0.946,
1198
+ "eval_bigbrother_wer": 0.5193748489486828,
1199
+ "step": 3080
1200
+ },
1201
+ {
1202
+ "epoch": 36.0,
1203
+ "grad_norm": 5.827705383300781,
1204
+ "learning_rate": 3.514851485148515e-06,
1205
+ "loss": 0.1873,
1206
+ "step": 3168
1207
+ },
1208
+ {
1209
+ "epoch": 36.0,
1210
+ "eval_rundkast_loss": 0.5837633609771729,
1211
+ "eval_rundkast_runtime": 31.7208,
1212
+ "eval_rundkast_samples_per_second": 42.212,
1213
+ "eval_rundkast_steps_per_second": 1.324,
1214
+ "eval_rundkast_wer": 0.19444888817789155,
1215
+ "step": 3168
1216
+ },
1217
+ {
1218
+ "epoch": 36.0,
1219
+ "eval_nb_samtale_loss": 0.5117051005363464,
1220
+ "eval_nb_samtale_runtime": 38.4155,
1221
+ "eval_nb_samtale_samples_per_second": 13.875,
1222
+ "eval_nb_samtale_steps_per_second": 0.443,
1223
+ "eval_nb_samtale_wer": 0.147216796875,
1224
+ "step": 3168
1225
+ },
1226
+ {
1227
+ "epoch": 36.0,
1228
+ "eval_bigbrother_loss": 2.5473718643188477,
1229
+ "eval_bigbrother_runtime": 45.3311,
1230
+ "eval_bigbrother_samples_per_second": 30.222,
1231
+ "eval_bigbrother_steps_per_second": 0.949,
1232
+ "eval_bigbrother_wer": 0.5215499879158946,
1233
+ "step": 3168
1234
+ },
1235
+ {
1236
+ "epoch": 37.0,
1237
+ "grad_norm": 3.566361427307129,
1238
+ "learning_rate": 2.6435643564356437e-06,
1239
+ "loss": 0.1791,
1240
+ "step": 3256
1241
+ },
1242
+ {
1243
+ "epoch": 37.0,
1244
+ "eval_rundkast_loss": 0.6002511382102966,
1245
+ "eval_rundkast_runtime": 31.7763,
1246
+ "eval_rundkast_samples_per_second": 42.138,
1247
+ "eval_rundkast_steps_per_second": 1.322,
1248
+ "eval_rundkast_wer": 0.19588865781474965,
1249
+ "step": 3256
1250
+ },
1251
+ {
1252
+ "epoch": 37.0,
1253
+ "eval_nb_samtale_loss": 0.5211741328239441,
1254
+ "eval_nb_samtale_runtime": 38.5386,
1255
+ "eval_nb_samtale_samples_per_second": 13.83,
1256
+ "eval_nb_samtale_steps_per_second": 0.441,
1257
+ "eval_nb_samtale_wer": 0.14713541666666666,
1258
+ "step": 3256
1259
+ },
1260
+ {
1261
+ "epoch": 37.0,
1262
+ "eval_bigbrother_loss": 2.629565715789795,
1263
+ "eval_bigbrother_runtime": 45.36,
1264
+ "eval_bigbrother_samples_per_second": 30.203,
1265
+ "eval_bigbrother_steps_per_second": 0.948,
1266
+ "eval_bigbrother_wer": 0.5238056875855958,
1267
+ "step": 3256
1268
+ },
1269
+ {
1270
+ "epoch": 38.0,
1271
+ "grad_norm": 6.092939376831055,
1272
+ "learning_rate": 1.7722772277227724e-06,
1273
+ "loss": 0.193,
1274
+ "step": 3344
1275
+ },
1276
+ {
1277
+ "epoch": 38.0,
1278
+ "eval_rundkast_loss": 0.595504641532898,
1279
+ "eval_rundkast_runtime": 31.8783,
1280
+ "eval_rundkast_samples_per_second": 42.004,
1281
+ "eval_rundkast_steps_per_second": 1.318,
1282
+ "eval_rundkast_wer": 0.19356902895536715,
1283
+ "step": 3344
1284
+ },
1285
+ {
1286
+ "epoch": 38.0,
1287
+ "eval_nb_samtale_loss": 0.5151902437210083,
1288
+ "eval_nb_samtale_runtime": 38.9536,
1289
+ "eval_nb_samtale_samples_per_second": 13.683,
1290
+ "eval_nb_samtale_steps_per_second": 0.436,
1291
+ "eval_nb_samtale_wer": 0.14737955729166666,
1292
+ "step": 3344
1293
+ },
1294
+ {
1295
+ "epoch": 38.0,
1296
+ "eval_bigbrother_loss": 2.59112548828125,
1297
+ "eval_bigbrother_runtime": 45.3015,
1298
+ "eval_bigbrother_samples_per_second": 30.242,
1299
+ "eval_bigbrother_steps_per_second": 0.949,
1300
+ "eval_bigbrother_wer": 0.5234028840731492,
1301
+ "step": 3344
1302
+ },
1303
+ {
1304
+ "epoch": 39.0,
1305
+ "grad_norm": 4.503037452697754,
1306
+ "learning_rate": 9.00990099009901e-07,
1307
+ "loss": 0.1767,
1308
+ "step": 3432
1309
+ },
1310
+ {
1311
+ "epoch": 39.0,
1312
+ "eval_rundkast_loss": 0.5997776389122009,
1313
+ "eval_rundkast_runtime": 31.7079,
1314
+ "eval_rundkast_samples_per_second": 42.229,
1315
+ "eval_rundkast_steps_per_second": 1.325,
1316
+ "eval_rundkast_wer": 0.1945288753799392,
1317
+ "step": 3432
1318
+ },
1319
+ {
1320
+ "epoch": 39.0,
1321
+ "eval_nb_samtale_loss": 0.5143499970436096,
1322
+ "eval_nb_samtale_runtime": 38.5399,
1323
+ "eval_nb_samtale_samples_per_second": 13.83,
1324
+ "eval_nb_samtale_steps_per_second": 0.441,
1325
+ "eval_nb_samtale_wer": 0.14607747395833334,
1326
+ "step": 3432
1327
+ },
1328
+ {
1329
+ "epoch": 39.0,
1330
+ "eval_bigbrother_loss": 2.604764938354492,
1331
+ "eval_bigbrother_runtime": 45.3914,
1332
+ "eval_bigbrother_samples_per_second": 30.182,
1333
+ "eval_bigbrother_steps_per_second": 0.947,
1334
+ "eval_bigbrother_wer": 0.5224361556432772,
1335
+ "step": 3432
1336
+ }
1337
+ ],
1338
+ "logging_steps": 500,
1339
+ "max_steps": 3520,
1340
+ "num_input_tokens_seen": 0,
1341
+ "num_train_epochs": 40,
1342
+ "save_steps": 500,
1343
+ "total_flos": 1.431775651547523e+20,
1344
+ "train_batch_size": 48,
1345
+ "trial_name": null,
1346
+ "trial_params": null
1347
+ }
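checkpoint-3432/trainer_state.json logs per-epoch eval losses and WERs for the rundkast, nb_samtale, and bigbrother sets. A small sketch, assuming a local checkout of the checkpoint folder, of pulling those curves back out of the file shown above.

```python
import json

# Parse the checkpoint's trainer_state.json (path assumed local to the checkout).
with open("checkpoint-3432/trainer_state.json") as f:
    state = json.load(f)

# best_metric (0.1460...) matches the nb_samtale eval WER logged at step 3432.
print("best WER:", state["best_metric"])

# Collect the per-epoch eval WER for each of the three eval sets in log_history.
for key in ("eval_rundkast_wer", "eval_nb_samtale_wer", "eval_bigbrother_wer"):
    curve = [(entry["epoch"], entry[key]) for entry in state["log_history"] if key in entry]
    print(key, "final:", curve[-1])
```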
checkpoint-3432/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d398cf74e9c3dfb84e4b7970c10ef91e54973a6fa45689917df89793be64f273
+ size 4984
checkpoint-3520/config.json ADDED
@@ -0,0 +1,108 @@
+ {
+ "_name_or_path": "NbAiLab/nb-wav2vec2-1b-bokmaal",
+ "activation_dropout": 0.055,
+ "adapter_attn_dim": null,
+ "adapter_kernel_size": 3,
+ "adapter_stride": 2,
+ "add_adapter": false,
+ "apply_spec_augment": true,
+ "architectures": [
+ "Wav2Vec2ForCTC"
+ ],
+ "attention_dropout": 0.094,
+ "bos_token_id": 1,
+ "classifier_proj_size": 256,
+ "codevector_dim": 1024,
+ "contrastive_logits_temperature": 0.1,
+ "conv_bias": true,
+ "conv_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512
+ ],
+ "conv_kernel": [
+ 10,
+ 3,
+ 3,
+ 3,
+ 3,
+ 2,
+ 2
+ ],
+ "conv_stride": [
+ 5,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2
+ ],
+ "ctc_loss_reduction": "mean",
+ "ctc_zero_infinity": true,
+ "diversity_loss_weight": 0.1,
+ "do_stable_layer_norm": true,
+ "eos_token_id": 2,
+ "feat_extract_activation": "gelu",
+ "feat_extract_dropout": 0.0,
+ "feat_extract_norm": "layer",
+ "feat_proj_dropout": 0.04,
+ "feat_quantizer_dropout": 0.0,
+ "final_dropout": 0.0,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.047,
+ "hidden_size": 1280,
+ "initializer_range": 0.02,
+ "intermediate_size": 5120,
+ "layer_norm_eps": 1e-05,
+ "layerdrop": 0.041,
+ "mask_feature_length": 64,
+ "mask_feature_min_masks": 0,
+ "mask_feature_prob": 0.25,
+ "mask_time_length": 10,
+ "mask_time_min_masks": 2,
+ "mask_time_prob": 0.082,
+ "model_type": "wav2vec2",
+ "num_adapter_layers": 3,
+ "num_attention_heads": 16,
+ "num_codevector_groups": 2,
+ "num_codevectors_per_group": 320,
+ "num_conv_pos_embedding_groups": 16,
+ "num_conv_pos_embeddings": 128,
+ "num_feat_extract_layers": 7,
+ "num_hidden_layers": 48,
+ "num_negatives": 100,
+ "output_hidden_size": 1280,
+ "pad_token_id": 31,
+ "proj_codevector_dim": 1024,
+ "tdnn_dilation": [
+ 1,
+ 2,
+ 3,
+ 1,
+ 1
+ ],
+ "tdnn_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 1500
+ ],
+ "tdnn_kernel": [
+ 5,
+ 3,
+ 3,
+ 1,
+ 1
+ ],
+ "torch_dtype": "float32",
+ "transformers_version": "4.38.1",
+ "use_weighted_layer_sum": false,
+ "vocab_size": 34,
+ "xvector_output_dim": 512
+ }
checkpoint-3520/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d969ffe7aeb0eb33e29e35db9980fb3c34afe60b2086fc3ff576414b17995447
+ size 3850265216
checkpoint-3520/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:434acdd9b68220c1244cfbb803203f7ec87c9527884bc69987211f221eb2aa88
+ size 7667307858
checkpoint-3520/preprocessor_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "do_normalize": true,
+ "feature_extractor_type": "Wav2Vec2FeatureExtractor",
+ "feature_size": 1,
+ "padding_side": "right",
+ "padding_value": 0,
+ "processor_class": "Wav2Vec2ProcessorWithLM",
+ "return_attention_mask": true,
+ "sampling_rate": 16000
+ }
checkpoint-3520/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dd2776945603a707a6ead853a7b423e09f37868e5fd1c2731fdcdc87a077b2a6
+ size 14244
checkpoint-3520/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:200e3f437fab06a8765238dc21bf3762303680d2e7fd5806cd6c64b26d8034a7
+ size 1064
checkpoint-3520/trainer_state.json ADDED
@@ -0,0 +1,1381 @@
1
+ {
2
+ "best_metric": 0.14607747395833334,
3
+ "best_model_checkpoint": "/cluster/home/torstefl/Master/saved_model/W2V/single/NB/NB-1b-19.06/checkpoint-3432",
4
+ "epoch": 40.0,
5
+ "eval_steps": 500,
6
+ "global_step": 3520,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 1.0,
13
+ "grad_norm": 10.972672462463379,
14
+ "learning_rate": 1.1600000000000001e-06,
15
+ "loss": 0.8305,
16
+ "step": 88
17
+ },
18
+ {
19
+ "epoch": 1.0,
20
+ "eval_rundkast_loss": 0.506217360496521,
21
+ "eval_rundkast_runtime": 31.8232,
22
+ "eval_rundkast_samples_per_second": 42.076,
23
+ "eval_rundkast_steps_per_second": 1.32,
24
+ "eval_rundkast_wer": 0.20532714765637497,
25
+ "step": 88
26
+ },
27
+ {
28
+ "epoch": 1.0,
29
+ "eval_nb_samtale_loss": 0.7159814238548279,
30
+ "eval_nb_samtale_runtime": 38.8295,
31
+ "eval_nb_samtale_samples_per_second": 13.727,
32
+ "eval_nb_samtale_steps_per_second": 0.438,
33
+ "eval_nb_samtale_wer": 0.20125325520833334,
34
+ "step": 88
35
+ },
36
+ {
37
+ "epoch": 1.0,
38
+ "eval_bigbrother_loss": 2.73382568359375,
39
+ "eval_bigbrother_runtime": 45.0269,
40
+ "eval_bigbrother_samples_per_second": 30.426,
41
+ "eval_bigbrother_steps_per_second": 0.955,
42
+ "eval_bigbrother_wer": 0.5745589301538709,
43
+ "step": 88
44
+ },
45
+ {
46
+ "epoch": 2.0,
47
+ "grad_norm": 6.6329345703125,
48
+ "learning_rate": 2.3333333333333336e-06,
49
+ "loss": 0.6423,
50
+ "step": 176
51
+ },
52
+ {
53
+ "epoch": 2.0,
54
+ "eval_rundkast_loss": 0.42037609219551086,
55
+ "eval_rundkast_runtime": 31.233,
56
+ "eval_rundkast_samples_per_second": 42.871,
57
+ "eval_rundkast_steps_per_second": 1.345,
58
+ "eval_rundkast_wer": 0.2020476723724204,
59
+ "step": 176
60
+ },
61
+ {
62
+ "epoch": 2.0,
63
+ "eval_nb_samtale_loss": 0.5762496590614319,
64
+ "eval_nb_samtale_runtime": 38.6529,
65
+ "eval_nb_samtale_samples_per_second": 13.789,
66
+ "eval_nb_samtale_steps_per_second": 0.44,
67
+ "eval_nb_samtale_wer": 0.19417317708333334,
68
+ "step": 176
69
+ },
70
+ {
71
+ "epoch": 2.0,
72
+ "eval_bigbrother_loss": 2.2738213539123535,
73
+ "eval_bigbrother_runtime": 44.7689,
74
+ "eval_bigbrother_samples_per_second": 30.602,
75
+ "eval_bigbrother_steps_per_second": 0.96,
76
+ "eval_bigbrother_wer": 0.5613469749456216,
77
+ "step": 176
78
+ },
79
+ {
80
+ "epoch": 3.0,
81
+ "grad_norm": 4.364439964294434,
82
+ "learning_rate": 3.5066666666666673e-06,
83
+ "loss": 0.5857,
84
+ "step": 264
85
+ },
86
+ {
87
+ "epoch": 3.0,
88
+ "eval_rundkast_loss": 0.41098639369010925,
89
+ "eval_rundkast_runtime": 31.3258,
90
+ "eval_rundkast_samples_per_second": 42.744,
91
+ "eval_rundkast_steps_per_second": 1.341,
92
+ "eval_rundkast_wer": 0.1977283634618461,
93
+ "step": 264
94
+ },
95
+ {
96
+ "epoch": 3.0,
97
+ "eval_nb_samtale_loss": 0.5280192494392395,
98
+ "eval_nb_samtale_runtime": 38.586,
99
+ "eval_nb_samtale_samples_per_second": 13.813,
100
+ "eval_nb_samtale_steps_per_second": 0.441,
101
+ "eval_nb_samtale_wer": 0.18359375,
102
+ "step": 264
103
+ },
104
+ {
105
+ "epoch": 3.0,
106
+ "eval_bigbrother_loss": 2.192491054534912,
107
+ "eval_bigbrother_runtime": 44.8015,
108
+ "eval_bigbrother_samples_per_second": 30.579,
109
+ "eval_bigbrother_steps_per_second": 0.96,
110
+ "eval_bigbrother_wer": 0.5548215580439861,
111
+ "step": 264
112
+ },
113
+ {
114
+ "epoch": 4.0,
115
+ "grad_norm": 5.418688774108887,
116
+ "learning_rate": 4.680000000000001e-06,
117
+ "loss": 0.5332,
118
+ "step": 352
119
+ },
120
+ {
121
+ "epoch": 4.0,
122
+ "eval_rundkast_loss": 0.3839390277862549,
123
+ "eval_rundkast_runtime": 31.2002,
124
+ "eval_rundkast_samples_per_second": 42.916,
125
+ "eval_rundkast_steps_per_second": 1.346,
126
+ "eval_rundkast_wer": 0.19900815869460886,
127
+ "step": 352
128
+ },
129
+ {
130
+ "epoch": 4.0,
131
+ "eval_nb_samtale_loss": 0.48143112659454346,
132
+ "eval_nb_samtale_runtime": 38.3454,
133
+ "eval_nb_samtale_samples_per_second": 13.9,
134
+ "eval_nb_samtale_steps_per_second": 0.443,
135
+ "eval_nb_samtale_wer": 0.179443359375,
136
+ "step": 352
137
+ },
138
+ {
139
+ "epoch": 4.0,
140
+ "eval_bigbrother_loss": 2.0169622898101807,
141
+ "eval_bigbrother_runtime": 44.7675,
142
+ "eval_bigbrother_samples_per_second": 30.603,
143
+ "eval_bigbrother_steps_per_second": 0.961,
144
+ "eval_bigbrother_wer": 0.5486183839523081,
145
+ "step": 352
146
+ },
147
+ {
148
+ "epoch": 5.0,
149
+ "grad_norm": 6.6992034912109375,
150
+ "learning_rate": 5.853333333333335e-06,
151
+ "loss": 0.5117,
152
+ "step": 440
153
+ },
154
+ {
155
+ "epoch": 5.0,
156
+ "eval_rundkast_loss": 0.38616734743118286,
157
+ "eval_rundkast_runtime": 31.1274,
158
+ "eval_rundkast_samples_per_second": 43.017,
159
+ "eval_rundkast_steps_per_second": 1.349,
160
+ "eval_rundkast_wer": 0.1986082226843705,
161
+ "step": 440
162
+ },
163
+ {
164
+ "epoch": 5.0,
165
+ "eval_nb_samtale_loss": 0.46892496943473816,
166
+ "eval_nb_samtale_runtime": 38.4436,
167
+ "eval_nb_samtale_samples_per_second": 13.864,
168
+ "eval_nb_samtale_steps_per_second": 0.442,
169
+ "eval_nb_samtale_wer": 0.17545572916666666,
170
+ "step": 440
171
+ },
172
+ {
173
+ "epoch": 5.0,
174
+ "eval_bigbrother_loss": 1.9676716327667236,
175
+ "eval_bigbrother_runtime": 44.7467,
176
+ "eval_bigbrother_samples_per_second": 30.617,
177
+ "eval_bigbrother_steps_per_second": 0.961,
178
+ "eval_bigbrother_wer": 0.5392733424635463,
179
+ "step": 440
180
+ },
181
+ {
182
+ "epoch": 6.0,
183
+ "grad_norm": 4.8291120529174805,
184
+ "learning_rate": 7.0266666666666674e-06,
185
+ "loss": 0.4809,
186
+ "step": 528
187
+ },
188
+ {
189
+ "epoch": 6.0,
190
+ "eval_rundkast_loss": 0.37009599804878235,
191
+ "eval_rundkast_runtime": 31.2555,
192
+ "eval_rundkast_samples_per_second": 42.84,
193
+ "eval_rundkast_steps_per_second": 1.344,
194
+ "eval_rundkast_wer": 0.19636858102703567,
195
+ "step": 528
196
+ },
197
+ {
198
+ "epoch": 6.0,
199
+ "eval_nb_samtale_loss": 0.4429633915424347,
200
+ "eval_nb_samtale_runtime": 38.3862,
201
+ "eval_nb_samtale_samples_per_second": 13.885,
202
+ "eval_nb_samtale_steps_per_second": 0.443,
203
+ "eval_nb_samtale_wer": 0.17342122395833334,
204
+ "step": 528
205
+ },
206
+ {
207
+ "epoch": 6.0,
208
+ "eval_bigbrother_loss": 1.9097399711608887,
209
+ "eval_bigbrother_runtime": 44.7043,
210
+ "eval_bigbrother_samples_per_second": 30.646,
211
+ "eval_bigbrother_steps_per_second": 0.962,
212
+ "eval_bigbrother_wer": 0.5354869894465479,
213
+ "step": 528
214
+ },
215
+ {
216
+ "epoch": 7.0,
217
+ "grad_norm": 6.0818376541137695,
218
+ "learning_rate": 8.2e-06,
219
+ "loss": 0.4485,
220
+ "step": 616
221
+ },
222
+ {
223
+ "epoch": 7.0,
224
+ "eval_rundkast_loss": 0.39074423909187317,
225
+ "eval_rundkast_runtime": 31.2655,
226
+ "eval_rundkast_samples_per_second": 42.827,
227
+ "eval_rundkast_steps_per_second": 1.343,
228
+ "eval_rundkast_wer": 0.19580867061270196,
229
+ "step": 616
230
+ },
231
+ {
232
+ "epoch": 7.0,
233
+ "eval_nb_samtale_loss": 0.44968363642692566,
234
+ "eval_nb_samtale_runtime": 38.4136,
235
+ "eval_nb_samtale_samples_per_second": 13.875,
236
+ "eval_nb_samtale_steps_per_second": 0.443,
237
+ "eval_nb_samtale_wer": 0.17179361979166666,
238
+ "step": 616
239
+ },
240
+ {
241
+ "epoch": 7.0,
242
+ "eval_bigbrother_loss": 2.002044439315796,
243
+ "eval_bigbrother_runtime": 44.9245,
244
+ "eval_bigbrother_samples_per_second": 30.496,
245
+ "eval_bigbrother_steps_per_second": 0.957,
246
+ "eval_bigbrother_wer": 0.5383871747361637,
247
+ "step": 616
248
+ },
249
+ {
250
+ "epoch": 8.0,
251
+ "grad_norm": 5.505432605743408,
252
+ "learning_rate": 9.373333333333334e-06,
253
+ "loss": 0.4364,
254
+ "step": 704
255
+ },
256
+ {
257
+ "epoch": 8.0,
258
+ "eval_rundkast_loss": 0.38471347093582153,
259
+ "eval_rundkast_runtime": 31.9089,
260
+ "eval_rundkast_samples_per_second": 41.963,
261
+ "eval_rundkast_steps_per_second": 1.316,
262
+ "eval_rundkast_wer": 0.1968485042393217,
263
+ "step": 704
264
+ },
265
+ {
266
+ "epoch": 8.0,
267
+ "eval_nb_samtale_loss": 0.45092540979385376,
268
+ "eval_nb_samtale_runtime": 38.457,
269
+ "eval_nb_samtale_samples_per_second": 13.86,
270
+ "eval_nb_samtale_steps_per_second": 0.442,
271
+ "eval_nb_samtale_wer": 0.16552734375,
272
+ "step": 704
273
+ },
274
+ {
275
+ "epoch": 8.0,
276
+ "eval_bigbrother_loss": 1.967365026473999,
277
+ "eval_bigbrother_runtime": 45.046,
278
+ "eval_bigbrother_samples_per_second": 30.413,
279
+ "eval_bigbrother_steps_per_second": 0.955,
280
+ "eval_bigbrother_wer": 0.5337146539917829,
281
+ "step": 704
282
+ },
283
+ {
284
+ "epoch": 9.0,
285
+ "grad_norm": 12.103546142578125,
286
+ "learning_rate": 1.0546666666666667e-05,
287
+ "loss": 0.4194,
288
+ "step": 792
289
+ },
290
+ {
291
+ "epoch": 9.0,
292
+ "eval_rundkast_loss": 0.37225037813186646,
293
+ "eval_rundkast_runtime": 31.1897,
294
+ "eval_rundkast_samples_per_second": 42.931,
295
+ "eval_rundkast_steps_per_second": 1.347,
296
+ "eval_rundkast_wer": 0.1936490161574148,
297
+ "step": 792
298
+ },
299
+ {
300
+ "epoch": 9.0,
301
+ "eval_nb_samtale_loss": 0.42615896463394165,
302
+ "eval_nb_samtale_runtime": 38.2289,
303
+ "eval_nb_samtale_samples_per_second": 13.942,
304
+ "eval_nb_samtale_steps_per_second": 0.445,
305
+ "eval_nb_samtale_wer": 0.164306640625,
306
+ "step": 792
307
+ },
308
+ {
309
+ "epoch": 9.0,
310
+ "eval_bigbrother_loss": 1.930216908454895,
311
+ "eval_bigbrother_runtime": 44.8984,
312
+ "eval_bigbrother_samples_per_second": 30.513,
313
+ "eval_bigbrother_steps_per_second": 0.958,
314
+ "eval_bigbrother_wer": 0.5332312897768469,
315
+ "step": 792
316
+ },
317
+ {
318
+ "epoch": 10.0,
319
+ "grad_norm": 8.539125442504883,
320
+ "learning_rate": 1.172e-05,
321
+ "loss": 0.4004,
322
+ "step": 880
323
+ },
324
+ {
325
+ "epoch": 10.0,
326
+ "eval_rundkast_loss": 0.3706592917442322,
327
+ "eval_rundkast_runtime": 31.2141,
328
+ "eval_rundkast_samples_per_second": 42.897,
329
+ "eval_rundkast_steps_per_second": 1.346,
330
+ "eval_rundkast_wer": 0.1950887857942729,
331
+ "step": 880
332
+ },
333
+ {
334
+ "epoch": 10.0,
335
+ "eval_nb_samtale_loss": 0.4074735939502716,
336
+ "eval_nb_samtale_runtime": 38.275,
337
+ "eval_nb_samtale_samples_per_second": 13.926,
338
+ "eval_nb_samtale_steps_per_second": 0.444,
339
+ "eval_nb_samtale_wer": 0.16300455729166666,
340
+ "step": 880
341
+ },
342
+ {
343
+ "epoch": 10.0,
344
+ "eval_bigbrother_loss": 1.9817578792572021,
345
+ "eval_bigbrother_runtime": 44.6517,
346
+ "eval_bigbrother_samples_per_second": 30.682,
347
+ "eval_bigbrother_steps_per_second": 0.963,
348
+ "eval_bigbrother_wer": 0.5371787641988238,
349
+ "step": 880
350
+ },
351
+ {
352
+ "epoch": 11.0,
353
+ "grad_norm": 4.635890007019043,
354
+ "learning_rate": 1.2893333333333336e-05,
355
+ "loss": 0.3842,
356
+ "step": 968
357
+ },
358
+ {
359
+ "epoch": 11.0,
360
+ "eval_rundkast_loss": 0.39748790860176086,
361
+ "eval_rundkast_runtime": 31.2552,
362
+ "eval_rundkast_samples_per_second": 42.841,
363
+ "eval_rundkast_steps_per_second": 1.344,
364
+ "eval_rundkast_wer": 0.19588865781474965,
365
+ "step": 968
366
+ },
367
+ {
368
+ "epoch": 11.0,
369
+ "eval_nb_samtale_loss": 0.4311392307281494,
370
+ "eval_nb_samtale_runtime": 38.4527,
371
+ "eval_nb_samtale_samples_per_second": 13.861,
372
+ "eval_nb_samtale_steps_per_second": 0.442,
373
+ "eval_nb_samtale_wer": 0.164794921875,
374
+ "step": 968
375
+ },
376
+ {
377
+ "epoch": 11.0,
378
+ "eval_bigbrother_loss": 1.9875516891479492,
379
+ "eval_bigbrother_runtime": 44.694,
380
+ "eval_bigbrother_samples_per_second": 30.653,
381
+ "eval_bigbrother_steps_per_second": 0.962,
382
+ "eval_bigbrother_wer": 0.5363731571739305,
383
+ "step": 968
384
+ },
385
+ {
386
+ "epoch": 12.0,
387
+ "grad_norm": 7.757190704345703,
388
+ "learning_rate": 1.4053333333333335e-05,
389
+ "loss": 0.3739,
390
+ "step": 1056
391
+ },
392
+ {
393
+ "epoch": 12.0,
394
+ "eval_rundkast_loss": 0.41941559314727783,
395
+ "eval_rundkast_runtime": 31.7689,
396
+ "eval_rundkast_samples_per_second": 42.148,
397
+ "eval_rundkast_steps_per_second": 1.322,
398
+ "eval_rundkast_wer": 0.191649336106223,
399
+ "step": 1056
400
+ },
401
+ {
402
+ "epoch": 12.0,
403
+ "eval_nb_samtale_loss": 0.4516970217227936,
404
+ "eval_nb_samtale_runtime": 38.6901,
405
+ "eval_nb_samtale_samples_per_second": 13.776,
406
+ "eval_nb_samtale_steps_per_second": 0.439,
407
+ "eval_nb_samtale_wer": 0.15885416666666666,
408
+ "step": 1056
409
+ },
410
+ {
411
+ "epoch": 12.0,
412
+ "eval_bigbrother_loss": 2.081986904144287,
413
+ "eval_bigbrother_runtime": 44.6453,
414
+ "eval_bigbrother_samples_per_second": 30.686,
415
+ "eval_bigbrother_steps_per_second": 0.963,
416
+ "eval_bigbrother_wer": 0.5287198904374446,
417
+ "step": 1056
418
+ },
419
+ {
420
+ "epoch": 13.0,
421
+ "grad_norm": 12.277324676513672,
422
+ "learning_rate": 1.5226666666666668e-05,
423
+ "loss": 0.3641,
424
+ "step": 1144
425
+ },
426
+ {
427
+ "epoch": 13.0,
428
+ "eval_rundkast_loss": 0.4138753414154053,
429
+ "eval_rundkast_runtime": 31.1718,
430
+ "eval_rundkast_samples_per_second": 42.956,
431
+ "eval_rundkast_steps_per_second": 1.347,
432
+ "eval_rundkast_wer": 0.19524876019836826,
433
+ "step": 1144
434
+ },
435
+ {
436
+ "epoch": 13.0,
437
+ "eval_nb_samtale_loss": 0.42660218477249146,
438
+ "eval_nb_samtale_runtime": 38.7065,
439
+ "eval_nb_samtale_samples_per_second": 13.77,
440
+ "eval_nb_samtale_steps_per_second": 0.439,
441
+ "eval_nb_samtale_wer": 0.16324869791666666,
442
+ "step": 1144
443
+ },
444
+ {
445
+ "epoch": 13.0,
446
+ "eval_bigbrother_loss": 2.037122964859009,
447
+ "eval_bigbrother_runtime": 45.1132,
448
+ "eval_bigbrother_samples_per_second": 30.368,
449
+ "eval_bigbrother_steps_per_second": 0.953,
450
+ "eval_bigbrother_wer": 0.5277531620075727,
451
+ "step": 1144
452
+ },
453
+ {
454
+ "epoch": 14.0,
455
+ "grad_norm": 6.313786506652832,
456
+ "learning_rate": 1.64e-05,
457
+ "loss": 0.3745,
458
+ "step": 1232
459
+ },
460
+ {
461
+ "epoch": 14.0,
462
+ "eval_rundkast_loss": 0.40043848752975464,
463
+ "eval_rundkast_runtime": 31.4205,
464
+ "eval_rundkast_samples_per_second": 42.616,
465
+ "eval_rundkast_steps_per_second": 1.337,
466
+ "eval_rundkast_wer": 0.196048632218845,
467
+ "step": 1232
468
+ },
469
+ {
470
+ "epoch": 14.0,
471
+ "eval_nb_samtale_loss": 0.43297895789146423,
472
+ "eval_nb_samtale_runtime": 38.2868,
473
+ "eval_nb_samtale_samples_per_second": 13.921,
474
+ "eval_nb_samtale_steps_per_second": 0.444,
475
+ "eval_nb_samtale_wer": 0.15926106770833334,
476
+ "step": 1232
477
+ },
478
+ {
479
+ "epoch": 14.0,
480
+ "eval_bigbrother_loss": 2.02278995513916,
481
+ "eval_bigbrother_runtime": 44.7867,
482
+ "eval_bigbrother_samples_per_second": 30.589,
483
+ "eval_bigbrother_steps_per_second": 0.96,
484
+ "eval_bigbrother_wer": 0.5295254974623379,
485
+ "step": 1232
486
+ },
487
+ {
488
+ "epoch": 15.0,
489
+ "grad_norm": 8.265440940856934,
490
+ "learning_rate": 1.756e-05,
491
+ "loss": 0.3399,
492
+ "step": 1320
493
+ },
494
+ {
495
+ "epoch": 15.0,
496
+ "eval_rundkast_loss": 0.4764655530452728,
497
+ "eval_rundkast_runtime": 31.1836,
498
+ "eval_rundkast_samples_per_second": 42.939,
499
+ "eval_rundkast_steps_per_second": 1.347,
500
+ "eval_rundkast_wer": 0.196528555431131,
501
+ "step": 1320
502
+ },
503
+ {
504
+ "epoch": 15.0,
505
+ "eval_nb_samtale_loss": 0.4471026062965393,
506
+ "eval_nb_samtale_runtime": 38.1832,
507
+ "eval_nb_samtale_samples_per_second": 13.959,
508
+ "eval_nb_samtale_steps_per_second": 0.445,
509
+ "eval_nb_samtale_wer": 0.15681966145833334,
510
+ "step": 1320
511
+ },
512
+ {
513
+ "epoch": 15.0,
514
+ "eval_bigbrother_loss": 2.3220856189727783,
515
+ "eval_bigbrother_runtime": 44.7848,
516
+ "eval_bigbrother_samples_per_second": 30.591,
517
+ "eval_bigbrother_steps_per_second": 0.96,
518
+ "eval_bigbrother_wer": 0.5332312897768469,
519
+ "step": 1320
520
+ },
521
+ {
522
+ "epoch": 16.0,
523
+ "grad_norm": 7.783019542694092,
524
+ "learning_rate": 1.8733333333333336e-05,
525
+ "loss": 0.3234,
526
+ "step": 1408
527
+ },
528
+ {
529
+ "epoch": 16.0,
530
+ "eval_rundkast_loss": 0.46145308017730713,
531
+ "eval_rundkast_runtime": 31.6896,
532
+ "eval_rundkast_samples_per_second": 42.254,
533
+ "eval_rundkast_steps_per_second": 1.325,
534
+ "eval_rundkast_wer": 0.19580867061270196,
535
+ "step": 1408
536
+ },
537
+ {
538
+ "epoch": 16.0,
539
+ "eval_nb_samtale_loss": 0.42990735173225403,
540
+ "eval_nb_samtale_runtime": 38.25,
541
+ "eval_nb_samtale_samples_per_second": 13.935,
542
+ "eval_nb_samtale_steps_per_second": 0.444,
543
+ "eval_nb_samtale_wer": 0.156494140625,
544
+ "step": 1408
545
+ },
546
+ {
547
+ "epoch": 16.0,
548
+ "eval_bigbrother_loss": 2.225994110107422,
549
+ "eval_bigbrother_runtime": 44.8274,
550
+ "eval_bigbrother_samples_per_second": 30.562,
551
+ "eval_bigbrother_steps_per_second": 0.959,
552
+ "eval_bigbrother_wer": 0.5260613872552968,
553
+ "step": 1408
554
+ },
555
+ {
556
+ "epoch": 17.0,
557
+ "grad_norm": 4.970972537994385,
558
+ "learning_rate": 1.9906666666666667e-05,
559
+ "loss": 0.3201,
560
+ "step": 1496
561
+ },
562
+ {
563
+ "epoch": 17.0,
564
+ "eval_rundkast_loss": 0.4546896815299988,
565
+ "eval_rundkast_runtime": 31.3176,
566
+ "eval_rundkast_samples_per_second": 42.756,
567
+ "eval_rundkast_steps_per_second": 1.341,
568
+ "eval_rundkast_wer": 0.19300911854103345,
569
+ "step": 1496
570
+ },
571
+ {
572
+ "epoch": 17.0,
573
+ "eval_nb_samtale_loss": 0.4340006113052368,
574
+ "eval_nb_samtale_runtime": 38.2709,
575
+ "eval_nb_samtale_samples_per_second": 13.927,
576
+ "eval_nb_samtale_steps_per_second": 0.444,
577
+ "eval_nb_samtale_wer": 0.15616861979166666,
578
+ "step": 1496
579
+ },
580
+ {
581
+ "epoch": 17.0,
582
+ "eval_bigbrother_loss": 2.141953945159912,
583
+ "eval_bigbrother_runtime": 44.7194,
584
+ "eval_bigbrother_samples_per_second": 30.635,
585
+ "eval_bigbrother_steps_per_second": 0.962,
586
+ "eval_bigbrother_wer": 0.5173608313864497,
587
+ "step": 1496
588
+ },
589
+ {
590
+ "epoch": 18.0,
591
+ "grad_norm": 4.4445576667785645,
592
+ "learning_rate": 1.91980198019802e-05,
593
+ "loss": 0.3069,
594
+ "step": 1584
595
+ },
596
+ {
597
+ "epoch": 18.0,
598
+ "eval_rundkast_loss": 0.47925588488578796,
599
+ "eval_rundkast_runtime": 31.2291,
600
+ "eval_rundkast_samples_per_second": 42.877,
601
+ "eval_rundkast_steps_per_second": 1.345,
602
+ "eval_rundkast_wer": 0.19676851703727405,
603
+ "step": 1584
604
+ },
605
+ {
606
+ "epoch": 18.0,
607
+ "eval_nb_samtale_loss": 0.4501398503780365,
608
+ "eval_nb_samtale_runtime": 38.2766,
609
+ "eval_nb_samtale_samples_per_second": 13.925,
610
+ "eval_nb_samtale_steps_per_second": 0.444,
611
+ "eval_nb_samtale_wer": 0.158203125,
612
+ "step": 1584
613
+ },
614
+ {
615
+ "epoch": 18.0,
616
+ "eval_bigbrother_loss": 2.33626651763916,
617
+ "eval_bigbrother_runtime": 44.7536,
618
+ "eval_bigbrother_samples_per_second": 30.612,
619
+ "eval_bigbrother_steps_per_second": 0.961,
620
+ "eval_bigbrother_wer": 0.5310561508096351,
621
+ "step": 1584
622
+ },
623
+ {
624
+ "epoch": 19.0,
625
+ "grad_norm": 4.55182409286499,
626
+ "learning_rate": 1.832673267326733e-05,
627
+ "loss": 0.2912,
628
+ "step": 1672
629
+ },
630
+ {
631
+ "epoch": 19.0,
632
+ "eval_rundkast_loss": 0.4635383188724518,
633
+ "eval_rundkast_runtime": 31.6064,
634
+ "eval_rundkast_samples_per_second": 42.365,
635
+ "eval_rundkast_steps_per_second": 1.329,
636
+ "eval_rundkast_wer": 0.20028795392737161,
637
+ "step": 1672
638
+ },
639
+ {
640
+ "epoch": 19.0,
641
+ "eval_nb_samtale_loss": 0.4135359227657318,
642
+ "eval_nb_samtale_runtime": 38.4073,
643
+ "eval_nb_samtale_samples_per_second": 13.878,
644
+ "eval_nb_samtale_steps_per_second": 0.443,
645
+ "eval_nb_samtale_wer": 0.156494140625,
646
+ "step": 1672
647
+ },
648
+ {
649
+ "epoch": 19.0,
650
+ "eval_bigbrother_loss": 2.3355190753936768,
651
+ "eval_bigbrother_runtime": 44.9365,
652
+ "eval_bigbrother_samples_per_second": 30.487,
653
+ "eval_bigbrother_steps_per_second": 0.957,
654
+ "eval_bigbrother_wer": 0.5369370820913558,
655
+ "step": 1672
656
+ },
657
+ {
658
+ "epoch": 20.0,
659
+ "grad_norm": 4.11824893951416,
660
+ "learning_rate": 1.7455445544554458e-05,
661
+ "loss": 0.2824,
662
+ "step": 1760
663
+ },
664
+ {
665
+ "epoch": 20.0,
666
+ "eval_rundkast_loss": 0.5276007652282715,
667
+ "eval_rundkast_runtime": 31.3512,
668
+ "eval_rundkast_samples_per_second": 42.71,
669
+ "eval_rundkast_steps_per_second": 1.34,
670
+ "eval_rundkast_wer": 0.196048632218845,
671
+ "step": 1760
672
+ },
673
+ {
674
+ "epoch": 20.0,
675
+ "eval_nb_samtale_loss": 0.47676244378089905,
676
+ "eval_nb_samtale_runtime": 38.3319,
677
+ "eval_nb_samtale_samples_per_second": 13.905,
678
+ "eval_nb_samtale_steps_per_second": 0.443,
679
+ "eval_nb_samtale_wer": 0.15804036458333334,
680
+ "step": 1760
681
+ },
682
+ {
683
+ "epoch": 20.0,
684
+ "eval_bigbrother_loss": 2.57776141166687,
685
+ "eval_bigbrother_runtime": 44.7907,
686
+ "eval_bigbrother_samples_per_second": 30.587,
687
+ "eval_bigbrother_steps_per_second": 0.96,
688
+ "eval_bigbrother_wer": 0.5436236203979699,
689
+ "step": 1760
690
+ },
691
+ {
692
+ "epoch": 21.0,
693
+ "grad_norm": 4.462925434112549,
694
+ "learning_rate": 1.6584158415841584e-05,
695
+ "loss": 0.2661,
696
+ "step": 1848
697
+ },
698
+ {
699
+ "epoch": 21.0,
700
+ "eval_rundkast_loss": 0.5087462067604065,
701
+ "eval_rundkast_runtime": 31.2914,
702
+ "eval_rundkast_samples_per_second": 42.791,
703
+ "eval_rundkast_steps_per_second": 1.342,
704
+ "eval_rundkast_wer": 0.19732842745160775,
705
+ "step": 1848
706
+ },
707
+ {
708
+ "epoch": 21.0,
709
+ "eval_nb_samtale_loss": 0.44534921646118164,
710
+ "eval_nb_samtale_runtime": 38.226,
711
+ "eval_nb_samtale_samples_per_second": 13.943,
712
+ "eval_nb_samtale_steps_per_second": 0.445,
713
+ "eval_nb_samtale_wer": 0.15413411458333334,
714
+ "step": 1848
715
+ },
716
+ {
717
+ "epoch": 21.0,
718
+ "eval_bigbrother_loss": 2.2979142665863037,
719
+ "eval_bigbrother_runtime": 44.8351,
720
+ "eval_bigbrother_samples_per_second": 30.556,
721
+ "eval_bigbrother_steps_per_second": 0.959,
722
+ "eval_bigbrother_wer": 0.5297671795698059,
723
+ "step": 1848
724
+ },
725
+ {
726
+ "epoch": 22.0,
727
+ "grad_norm": 3.7362682819366455,
728
+ "learning_rate": 1.5712871287128716e-05,
729
+ "loss": 0.2543,
730
+ "step": 1936
731
+ },
732
+ {
733
+ "epoch": 22.0,
734
+ "eval_rundkast_loss": 0.5440049171447754,
735
+ "eval_rundkast_runtime": 31.2654,
736
+ "eval_rundkast_samples_per_second": 42.827,
737
+ "eval_rundkast_steps_per_second": 1.343,
738
+ "eval_rundkast_wer": 0.19756838905775076,
739
+ "step": 1936
740
+ },
741
+ {
742
+ "epoch": 22.0,
743
+ "eval_nb_samtale_loss": 0.486325204372406,
744
+ "eval_nb_samtale_runtime": 38.3702,
745
+ "eval_nb_samtale_samples_per_second": 13.891,
746
+ "eval_nb_samtale_steps_per_second": 0.443,
747
+ "eval_nb_samtale_wer": 0.15763346354166666,
748
+ "step": 1936
749
+ },
750
+ {
751
+ "epoch": 22.0,
752
+ "eval_bigbrother_loss": 2.4817354679107666,
753
+ "eval_bigbrother_runtime": 44.7425,
754
+ "eval_bigbrother_samples_per_second": 30.62,
755
+ "eval_bigbrother_steps_per_second": 0.961,
756
+ "eval_bigbrother_wer": 0.5300088616772738,
757
+ "step": 1936
758
+ },
759
+ {
760
+ "epoch": 23.0,
761
+ "grad_norm": 3.388705015182495,
762
+ "learning_rate": 1.4841584158415842e-05,
763
+ "loss": 0.2522,
764
+ "step": 2024
765
+ },
766
+ {
767
+ "epoch": 23.0,
768
+ "eval_rundkast_loss": 0.5401936173439026,
769
+ "eval_rundkast_runtime": 31.3567,
770
+ "eval_rundkast_samples_per_second": 42.702,
771
+ "eval_rundkast_steps_per_second": 1.339,
772
+ "eval_rundkast_wer": 0.19380899056151016,
773
+ "step": 2024
774
+ },
775
+ {
776
+ "epoch": 23.0,
777
+ "eval_nb_samtale_loss": 0.45578888058662415,
778
+ "eval_nb_samtale_runtime": 38.2879,
779
+ "eval_nb_samtale_samples_per_second": 13.921,
780
+ "eval_nb_samtale_steps_per_second": 0.444,
781
+ "eval_nb_samtale_wer": 0.15315755208333334,
782
+ "step": 2024
783
+ },
784
+ {
785
+ "epoch": 23.0,
786
+ "eval_bigbrother_loss": 2.4559385776519775,
787
+ "eval_bigbrother_runtime": 44.8922,
788
+ "eval_bigbrother_samples_per_second": 30.518,
789
+ "eval_bigbrother_steps_per_second": 0.958,
790
+ "eval_bigbrother_wer": 0.5254169016353822,
791
+ "step": 2024
792
+ },
793
+ {
794
+ "epoch": 24.0,
795
+ "grad_norm": 4.662117004394531,
796
+ "learning_rate": 1.3970297029702971e-05,
797
+ "loss": 0.2544,
798
+ "step": 2112
799
+ },
800
+ {
801
+ "epoch": 24.0,
802
+ "eval_rundkast_loss": 0.5467772483825684,
803
+ "eval_rundkast_runtime": 31.3965,
804
+ "eval_rundkast_samples_per_second": 42.648,
805
+ "eval_rundkast_steps_per_second": 1.338,
806
+ "eval_rundkast_wer": 0.1951687729963206,
807
+ "step": 2112
808
+ },
809
+ {
810
+ "epoch": 24.0,
811
+ "eval_nb_samtale_loss": 0.45863205194473267,
812
+ "eval_nb_samtale_runtime": 38.2548,
813
+ "eval_nb_samtale_samples_per_second": 13.933,
814
+ "eval_nb_samtale_steps_per_second": 0.444,
815
+ "eval_nb_samtale_wer": 0.151123046875,
816
+ "step": 2112
817
+ },
818
+ {
819
+ "epoch": 24.0,
820
+ "eval_bigbrother_loss": 2.302259922027588,
821
+ "eval_bigbrother_runtime": 44.9057,
822
+ "eval_bigbrother_samples_per_second": 30.508,
823
+ "eval_bigbrother_steps_per_second": 0.958,
824
+ "eval_bigbrother_wer": 0.5301699830822525,
825
+ "step": 2112
826
+ },
827
+ {
828
+ "epoch": 25.0,
829
+ "grad_norm": 2.7924931049346924,
830
+ "learning_rate": 1.30990099009901e-05,
831
+ "loss": 0.2445,
832
+ "step": 2200
833
+ },
834
+ {
835
+ "epoch": 25.0,
836
+ "eval_rundkast_loss": 0.5389044880867004,
837
+ "eval_rundkast_runtime": 31.3233,
838
+ "eval_rundkast_samples_per_second": 42.748,
839
+ "eval_rundkast_steps_per_second": 1.341,
840
+ "eval_rundkast_wer": 0.19500879859222525,
841
+ "step": 2200
842
+ },
843
+ {
844
+ "epoch": 25.0,
845
+ "eval_nb_samtale_loss": 0.4871143698692322,
846
+ "eval_nb_samtale_runtime": 38.3162,
847
+ "eval_nb_samtale_samples_per_second": 13.911,
848
+ "eval_nb_samtale_steps_per_second": 0.444,
849
+ "eval_nb_samtale_wer": 0.15299479166666666,
850
+ "step": 2200
851
+ },
852
+ {
853
+ "epoch": 25.0,
854
+ "eval_bigbrother_loss": 2.405670166015625,
855
+ "eval_bigbrother_runtime": 44.8162,
856
+ "eval_bigbrother_samples_per_second": 30.569,
857
+ "eval_bigbrother_steps_per_second": 0.959,
858
+ "eval_bigbrother_wer": 0.5264641907677435,
859
+ "step": 2200
860
+ },
861
+ {
862
+ "epoch": 26.0,
863
+ "grad_norm": 5.481632232666016,
864
+ "learning_rate": 1.2227722772277228e-05,
865
+ "loss": 0.2255,
866
+ "step": 2288
867
+ },
868
+ {
869
+ "epoch": 26.0,
870
+ "eval_rundkast_loss": 0.5802582502365112,
871
+ "eval_rundkast_runtime": 31.3722,
872
+ "eval_rundkast_samples_per_second": 42.681,
873
+ "eval_rundkast_steps_per_second": 1.339,
874
+ "eval_rundkast_wer": 0.19820828667413215,
875
+ "step": 2288
876
+ },
877
+ {
878
+ "epoch": 26.0,
879
+ "eval_nb_samtale_loss": 0.5078674554824829,
880
+ "eval_nb_samtale_runtime": 38.1701,
881
+ "eval_nb_samtale_samples_per_second": 13.964,
882
+ "eval_nb_samtale_steps_per_second": 0.445,
883
+ "eval_nb_samtale_wer": 0.1533203125,
884
+ "step": 2288
885
+ },
886
+ {
887
+ "epoch": 26.0,
888
+ "eval_bigbrother_loss": 2.5950069427490234,
889
+ "eval_bigbrother_runtime": 45.0629,
890
+ "eval_bigbrother_samples_per_second": 30.402,
891
+ "eval_bigbrother_steps_per_second": 0.954,
892
+ "eval_bigbrother_wer": 0.5358897929589946,
893
+ "step": 2288
894
+ },
895
+ {
896
+ "epoch": 27.0,
897
+ "grad_norm": 6.247470855712891,
898
+ "learning_rate": 1.1356435643564357e-05,
899
+ "loss": 0.2317,
900
+ "step": 2376
901
+ },
902
+ {
903
+ "epoch": 27.0,
904
+ "eval_rundkast_loss": 0.5534220337867737,
905
+ "eval_rundkast_runtime": 31.8039,
906
+ "eval_rundkast_samples_per_second": 42.102,
907
+ "eval_rundkast_steps_per_second": 1.321,
908
+ "eval_rundkast_wer": 0.19908814589665655,
909
+ "step": 2376
910
+ },
911
+ {
912
+ "epoch": 27.0,
913
+ "eval_nb_samtale_loss": 0.46262821555137634,
914
+ "eval_nb_samtale_runtime": 38.5085,
915
+ "eval_nb_samtale_samples_per_second": 13.841,
916
+ "eval_nb_samtale_steps_per_second": 0.441,
917
+ "eval_nb_samtale_wer": 0.15234375,
918
+ "step": 2376
919
+ },
920
+ {
921
+ "epoch": 27.0,
922
+ "eval_bigbrother_loss": 2.396604537963867,
923
+ "eval_bigbrother_runtime": 45.0988,
924
+ "eval_bigbrother_samples_per_second": 30.378,
925
+ "eval_bigbrother_steps_per_second": 0.953,
926
+ "eval_bigbrother_wer": 0.5242084910980424,
927
+ "step": 2376
928
+ },
929
+ {
930
+ "epoch": 28.0,
931
+ "grad_norm": 5.388619899749756,
932
+ "learning_rate": 1.0485148514851486e-05,
933
+ "loss": 0.2157,
934
+ "step": 2464
935
+ },
936
+ {
937
+ "epoch": 28.0,
938
+ "eval_rundkast_loss": 0.565626859664917,
939
+ "eval_rundkast_runtime": 31.3606,
940
+ "eval_rundkast_samples_per_second": 42.697,
941
+ "eval_rundkast_steps_per_second": 1.339,
942
+ "eval_rundkast_wer": 0.1988481842905135,
943
+ "step": 2464
944
+ },
945
+ {
946
+ "epoch": 28.0,
947
+ "eval_nb_samtale_loss": 0.4518119990825653,
948
+ "eval_nb_samtale_runtime": 38.2392,
949
+ "eval_nb_samtale_samples_per_second": 13.939,
950
+ "eval_nb_samtale_steps_per_second": 0.445,
951
+ "eval_nb_samtale_wer": 0.15185546875,
952
+ "step": 2464
953
+ },
954
+ {
955
+ "epoch": 28.0,
956
+ "eval_bigbrother_loss": 2.401461601257324,
957
+ "eval_bigbrother_runtime": 44.8671,
958
+ "eval_bigbrother_samples_per_second": 30.535,
959
+ "eval_bigbrother_steps_per_second": 0.958,
960
+ "eval_bigbrother_wer": 0.5246918553129784,
961
+ "step": 2464
962
+ },
963
+ {
964
+ "epoch": 29.0,
965
+ "grad_norm": 6.6439738273620605,
966
+ "learning_rate": 9.613861386138615e-06,
967
+ "loss": 0.22,
968
+ "step": 2552
969
+ },
970
+ {
971
+ "epoch": 29.0,
972
+ "eval_rundkast_loss": 0.5526180267333984,
973
+ "eval_rundkast_runtime": 31.4809,
974
+ "eval_rundkast_samples_per_second": 42.534,
975
+ "eval_rundkast_steps_per_second": 1.334,
976
+ "eval_rundkast_wer": 0.19996800511918092,
977
+ "step": 2552
978
+ },
979
+ {
980
+ "epoch": 29.0,
981
+ "eval_nb_samtale_loss": 0.4727042317390442,
982
+ "eval_nb_samtale_runtime": 38.2477,
983
+ "eval_nb_samtale_samples_per_second": 13.935,
984
+ "eval_nb_samtale_steps_per_second": 0.444,
985
+ "eval_nb_samtale_wer": 0.15364583333333334,
986
+ "step": 2552
987
+ },
988
+ {
989
+ "epoch": 29.0,
990
+ "eval_bigbrother_loss": 2.432448387145996,
991
+ "eval_bigbrother_runtime": 45.1692,
992
+ "eval_bigbrother_samples_per_second": 30.33,
993
+ "eval_bigbrother_steps_per_second": 0.952,
994
+ "eval_bigbrother_wer": 0.5283976476274873,
995
+ "step": 2552
996
+ },
997
+ {
998
+ "epoch": 30.0,
999
+ "grad_norm": 6.641559600830078,
1000
+ "learning_rate": 8.742574257425743e-06,
1001
+ "loss": 0.2142,
1002
+ "step": 2640
1003
+ },
1004
+ {
1005
+ "epoch": 30.0,
1006
+ "eval_rundkast_loss": 0.5641056299209595,
1007
+ "eval_rundkast_runtime": 31.6944,
1008
+ "eval_rundkast_samples_per_second": 42.247,
1009
+ "eval_rundkast_steps_per_second": 1.325,
1010
+ "eval_rundkast_wer": 0.19580867061270196,
1011
+ "step": 2640
1012
+ },
1013
+ {
1014
+ "epoch": 30.0,
1015
+ "eval_nb_samtale_loss": 0.48357564210891724,
1016
+ "eval_nb_samtale_runtime": 38.4852,
1017
+ "eval_nb_samtale_samples_per_second": 13.849,
1018
+ "eval_nb_samtale_steps_per_second": 0.442,
1019
+ "eval_nb_samtale_wer": 0.14640299479166666,
1020
+ "step": 2640
1021
+ },
1022
+ {
1023
+ "epoch": 30.0,
1024
+ "eval_bigbrother_loss": 2.3856699466705322,
1025
+ "eval_bigbrother_runtime": 44.9444,
1026
+ "eval_bigbrother_samples_per_second": 30.482,
1027
+ "eval_bigbrother_steps_per_second": 0.957,
1028
+ "eval_bigbrother_wer": 0.52090550229598,
1029
+ "step": 2640
1030
+ },
1031
+ {
1032
+ "epoch": 31.0,
1033
+ "grad_norm": 5.838632106781006,
1034
+ "learning_rate": 7.871287128712872e-06,
1035
+ "loss": 0.1948,
1036
+ "step": 2728
1037
+ },
1038
+ {
1039
+ "epoch": 31.0,
1040
+ "eval_rundkast_loss": 0.5922185778617859,
1041
+ "eval_rundkast_runtime": 31.5341,
1042
+ "eval_rundkast_samples_per_second": 42.462,
1043
+ "eval_rundkast_steps_per_second": 1.332,
1044
+ "eval_rundkast_wer": 0.19708846584546472,
1045
+ "step": 2728
1046
+ },
1047
+ {
1048
+ "epoch": 31.0,
1049
+ "eval_nb_samtale_loss": 0.5342143774032593,
1050
+ "eval_nb_samtale_runtime": 38.0861,
1051
+ "eval_nb_samtale_samples_per_second": 13.995,
1052
+ "eval_nb_samtale_steps_per_second": 0.446,
1053
+ "eval_nb_samtale_wer": 0.14860026041666666,
1054
+ "step": 2728
1055
+ },
1056
+ {
1057
+ "epoch": 31.0,
1058
+ "eval_bigbrother_loss": 2.5876622200012207,
1059
+ "eval_bigbrother_runtime": 44.9279,
1060
+ "eval_bigbrother_samples_per_second": 30.493,
1061
+ "eval_bigbrother_steps_per_second": 0.957,
1062
+ "eval_bigbrother_wer": 0.5268669942801901,
1063
+ "step": 2728
1064
+ },
1065
+ {
1066
+ "epoch": 32.0,
1067
+ "grad_norm": 7.094357967376709,
1068
+ "learning_rate": 7e-06,
1069
+ "loss": 0.1949,
1070
+ "step": 2816
1071
+ },
1072
+ {
1073
+ "epoch": 32.0,
1074
+ "eval_rundkast_loss": 0.5746641159057617,
1075
+ "eval_rundkast_runtime": 31.5695,
1076
+ "eval_rundkast_samples_per_second": 42.414,
1077
+ "eval_rundkast_steps_per_second": 1.33,
1078
+ "eval_rundkast_wer": 0.19524876019836826,
1079
+ "step": 2816
1080
+ },
1081
+ {
1082
+ "epoch": 32.0,
1083
+ "eval_nb_samtale_loss": 0.5246254801750183,
1084
+ "eval_nb_samtale_runtime": 38.4186,
1085
+ "eval_nb_samtale_samples_per_second": 13.873,
1086
+ "eval_nb_samtale_steps_per_second": 0.442,
1087
+ "eval_nb_samtale_wer": 0.15120442708333334,
1088
+ "step": 2816
1089
+ },
1090
+ {
1091
+ "epoch": 32.0,
1092
+ "eval_bigbrother_loss": 2.5409083366394043,
1093
+ "eval_bigbrother_runtime": 45.1035,
1094
+ "eval_bigbrother_samples_per_second": 30.375,
1095
+ "eval_bigbrother_steps_per_second": 0.953,
1096
+ "eval_bigbrother_wer": 0.5232417626681705,
1097
+ "step": 2816
1098
+ },
1099
+ {
1100
+ "epoch": 33.0,
1101
+ "grad_norm": 3.003549098968506,
1102
+ "learning_rate": 6.128712871287129e-06,
1103
+ "loss": 0.204,
1104
+ "step": 2904
1105
+ },
1106
+ {
1107
+ "epoch": 33.0,
1108
+ "eval_rundkast_loss": 0.5838789939880371,
1109
+ "eval_rundkast_runtime": 31.6764,
1110
+ "eval_rundkast_samples_per_second": 42.271,
1111
+ "eval_rundkast_steps_per_second": 1.326,
1112
+ "eval_rundkast_wer": 0.19460886258198687,
1113
+ "step": 2904
1114
+ },
1115
+ {
1116
+ "epoch": 33.0,
1117
+ "eval_nb_samtale_loss": 0.5083252787590027,
1118
+ "eval_nb_samtale_runtime": 38.236,
1119
+ "eval_nb_samtale_samples_per_second": 13.94,
1120
+ "eval_nb_samtale_steps_per_second": 0.445,
1121
+ "eval_nb_samtale_wer": 0.14876302083333334,
1122
+ "step": 2904
1123
+ },
1124
+ {
1125
+ "epoch": 33.0,
1126
+ "eval_bigbrother_loss": 2.4765820503234863,
1127
+ "eval_bigbrother_runtime": 45.0387,
1128
+ "eval_bigbrother_samples_per_second": 30.418,
1129
+ "eval_bigbrother_steps_per_second": 0.955,
1130
+ "eval_bigbrother_wer": 0.5217111093208733,
1131
+ "step": 2904
1132
+ },
1133
+ {
1134
+ "epoch": 34.0,
1135
+ "grad_norm": 13.520666122436523,
1136
+ "learning_rate": 5.257425742574258e-06,
1137
+ "loss": 0.1946,
1138
+ "step": 2992
1139
+ },
1140
+ {
1141
+ "epoch": 34.0,
1142
+ "eval_rundkast_loss": 0.5890854597091675,
1143
+ "eval_rundkast_runtime": 32.1786,
1144
+ "eval_rundkast_samples_per_second": 41.611,
1145
+ "eval_rundkast_steps_per_second": 1.305,
1146
+ "eval_rundkast_wer": 0.19380899056151016,
1147
+ "step": 2992
1148
+ },
1149
+ {
1150
+ "epoch": 34.0,
1151
+ "eval_nb_samtale_loss": 0.5050138235092163,
1152
+ "eval_nb_samtale_runtime": 38.4424,
1153
+ "eval_nb_samtale_samples_per_second": 13.865,
1154
+ "eval_nb_samtale_steps_per_second": 0.442,
1155
+ "eval_nb_samtale_wer": 0.14876302083333334,
1156
+ "step": 2992
1157
+ },
1158
+ {
1159
+ "epoch": 34.0,
1160
+ "eval_bigbrother_loss": 2.5397651195526123,
1161
+ "eval_bigbrother_runtime": 46.0852,
1162
+ "eval_bigbrother_samples_per_second": 29.728,
1163
+ "eval_bigbrother_steps_per_second": 0.933,
1164
+ "eval_bigbrother_wer": 0.5208249415934907,
1165
+ "step": 2992
1166
+ },
1167
+ {
1168
+ "epoch": 35.0,
1169
+ "grad_norm": 13.422295570373535,
1170
+ "learning_rate": 4.386138613861386e-06,
1171
+ "loss": 0.1953,
1172
+ "step": 3080
1173
+ },
1174
+ {
1175
+ "epoch": 35.0,
1176
+ "eval_rundkast_loss": 0.5813275575637817,
1177
+ "eval_rundkast_runtime": 31.8014,
1178
+ "eval_rundkast_samples_per_second": 42.105,
1179
+ "eval_rundkast_steps_per_second": 1.321,
1180
+ "eval_rundkast_wer": 0.19404895216765317,
1181
+ "step": 3080
1182
+ },
1183
+ {
1184
+ "epoch": 35.0,
1185
+ "eval_nb_samtale_loss": 0.5057322382926941,
1186
+ "eval_nb_samtale_runtime": 38.3631,
1187
+ "eval_nb_samtale_samples_per_second": 13.894,
1188
+ "eval_nb_samtale_steps_per_second": 0.443,
1189
+ "eval_nb_samtale_wer": 0.14925130208333334,
1190
+ "step": 3080
1191
+ },
1192
+ {
1193
+ "epoch": 35.0,
1194
+ "eval_bigbrother_loss": 2.4785802364349365,
1195
+ "eval_bigbrother_runtime": 45.4371,
1196
+ "eval_bigbrother_samples_per_second": 30.152,
1197
+ "eval_bigbrother_steps_per_second": 0.946,
1198
+ "eval_bigbrother_wer": 0.5193748489486828,
1199
+ "step": 3080
1200
+ },
1201
+ {
1202
+ "epoch": 36.0,
1203
+ "grad_norm": 5.827705383300781,
1204
+ "learning_rate": 3.514851485148515e-06,
1205
+ "loss": 0.1873,
1206
+ "step": 3168
1207
+ },
1208
+ {
1209
+ "epoch": 36.0,
1210
+ "eval_rundkast_loss": 0.5837633609771729,
1211
+ "eval_rundkast_runtime": 31.7208,
1212
+ "eval_rundkast_samples_per_second": 42.212,
1213
+ "eval_rundkast_steps_per_second": 1.324,
1214
+ "eval_rundkast_wer": 0.19444888817789155,
1215
+ "step": 3168
1216
+ },
1217
+ {
1218
+ "epoch": 36.0,
1219
+ "eval_nb_samtale_loss": 0.5117051005363464,
1220
+ "eval_nb_samtale_runtime": 38.4155,
1221
+ "eval_nb_samtale_samples_per_second": 13.875,
1222
+ "eval_nb_samtale_steps_per_second": 0.443,
1223
+ "eval_nb_samtale_wer": 0.147216796875,
1224
+ "step": 3168
1225
+ },
1226
+ {
1227
+ "epoch": 36.0,
1228
+ "eval_bigbrother_loss": 2.5473718643188477,
1229
+ "eval_bigbrother_runtime": 45.3311,
1230
+ "eval_bigbrother_samples_per_second": 30.222,
1231
+ "eval_bigbrother_steps_per_second": 0.949,
1232
+ "eval_bigbrother_wer": 0.5215499879158946,
1233
+ "step": 3168
1234
+ },
1235
+ {
1236
+ "epoch": 37.0,
1237
+ "grad_norm": 3.566361427307129,
1238
+ "learning_rate": 2.6435643564356437e-06,
1239
+ "loss": 0.1791,
1240
+ "step": 3256
1241
+ },
1242
+ {
1243
+ "epoch": 37.0,
1244
+ "eval_rundkast_loss": 0.6002511382102966,
1245
+ "eval_rundkast_runtime": 31.7763,
1246
+ "eval_rundkast_samples_per_second": 42.138,
1247
+ "eval_rundkast_steps_per_second": 1.322,
1248
+ "eval_rundkast_wer": 0.19588865781474965,
1249
+ "step": 3256
1250
+ },
1251
+ {
1252
+ "epoch": 37.0,
1253
+ "eval_nb_samtale_loss": 0.5211741328239441,
1254
+ "eval_nb_samtale_runtime": 38.5386,
1255
+ "eval_nb_samtale_samples_per_second": 13.83,
1256
+ "eval_nb_samtale_steps_per_second": 0.441,
1257
+ "eval_nb_samtale_wer": 0.14713541666666666,
1258
+ "step": 3256
1259
+ },
1260
+ {
1261
+ "epoch": 37.0,
1262
+ "eval_bigbrother_loss": 2.629565715789795,
1263
+ "eval_bigbrother_runtime": 45.36,
1264
+ "eval_bigbrother_samples_per_second": 30.203,
1265
+ "eval_bigbrother_steps_per_second": 0.948,
1266
+ "eval_bigbrother_wer": 0.5238056875855958,
1267
+ "step": 3256
1268
+ },
1269
+ {
1270
+ "epoch": 38.0,
1271
+ "grad_norm": 6.092939376831055,
1272
+ "learning_rate": 1.7722772277227724e-06,
1273
+ "loss": 0.193,
1274
+ "step": 3344
1275
+ },
1276
+ {
1277
+ "epoch": 38.0,
1278
+ "eval_rundkast_loss": 0.595504641532898,
1279
+ "eval_rundkast_runtime": 31.8783,
1280
+ "eval_rundkast_samples_per_second": 42.004,
1281
+ "eval_rundkast_steps_per_second": 1.318,
1282
+ "eval_rundkast_wer": 0.19356902895536715,
1283
+ "step": 3344
1284
+ },
1285
+ {
1286
+ "epoch": 38.0,
1287
+ "eval_nb_samtale_loss": 0.5151902437210083,
1288
+ "eval_nb_samtale_runtime": 38.9536,
1289
+ "eval_nb_samtale_samples_per_second": 13.683,
1290
+ "eval_nb_samtale_steps_per_second": 0.436,
1291
+ "eval_nb_samtale_wer": 0.14737955729166666,
1292
+ "step": 3344
1293
+ },
1294
+ {
1295
+ "epoch": 38.0,
1296
+ "eval_bigbrother_loss": 2.59112548828125,
1297
+ "eval_bigbrother_runtime": 45.3015,
1298
+ "eval_bigbrother_samples_per_second": 30.242,
1299
+ "eval_bigbrother_steps_per_second": 0.949,
1300
+ "eval_bigbrother_wer": 0.5234028840731492,
1301
+ "step": 3344
1302
+ },
1303
+ {
1304
+ "epoch": 39.0,
1305
+ "grad_norm": 4.503037452697754,
1306
+ "learning_rate": 9.00990099009901e-07,
1307
+ "loss": 0.1767,
1308
+ "step": 3432
1309
+ },
1310
+ {
1311
+ "epoch": 39.0,
1312
+ "eval_rundkast_loss": 0.5997776389122009,
1313
+ "eval_rundkast_runtime": 31.7079,
1314
+ "eval_rundkast_samples_per_second": 42.229,
1315
+ "eval_rundkast_steps_per_second": 1.325,
1316
+ "eval_rundkast_wer": 0.1945288753799392,
1317
+ "step": 3432
1318
+ },
1319
+ {
1320
+ "epoch": 39.0,
1321
+ "eval_nb_samtale_loss": 0.5143499970436096,
1322
+ "eval_nb_samtale_runtime": 38.5399,
1323
+ "eval_nb_samtale_samples_per_second": 13.83,
1324
+ "eval_nb_samtale_steps_per_second": 0.441,
1325
+ "eval_nb_samtale_wer": 0.14607747395833334,
1326
+ "step": 3432
1327
+ },
1328
+ {
1329
+ "epoch": 39.0,
1330
+ "eval_bigbrother_loss": 2.604764938354492,
1331
+ "eval_bigbrother_runtime": 45.3914,
1332
+ "eval_bigbrother_samples_per_second": 30.182,
1333
+ "eval_bigbrother_steps_per_second": 0.947,
1334
+ "eval_bigbrother_wer": 0.5224361556432772,
1335
+ "step": 3432
1336
+ },
1337
+ {
1338
+ "epoch": 40.0,
1339
+ "grad_norm": 5.29674768447876,
1340
+ "learning_rate": 2.9702970297029707e-08,
1341
+ "loss": 0.1726,
1342
+ "step": 3520
1343
+ },
1344
+ {
1345
+ "epoch": 40.0,
1346
+ "eval_rundkast_loss": 0.6034849286079407,
1347
+ "eval_rundkast_runtime": 31.8224,
1348
+ "eval_rundkast_samples_per_second": 42.077,
1349
+ "eval_rundkast_steps_per_second": 1.32,
1350
+ "eval_rundkast_wer": 0.19476883698608222,
1351
+ "step": 3520
1352
+ },
1353
+ {
1354
+ "epoch": 40.0,
1355
+ "eval_nb_samtale_loss": 0.5175977945327759,
1356
+ "eval_nb_samtale_runtime": 38.5824,
1357
+ "eval_nb_samtale_samples_per_second": 13.815,
1358
+ "eval_nb_samtale_steps_per_second": 0.441,
1359
+ "eval_nb_samtale_wer": 0.146484375,
1360
+ "step": 3520
1361
+ },
1362
+ {
1363
+ "epoch": 40.0,
1364
+ "eval_bigbrother_loss": 2.614839553833008,
1365
+ "eval_bigbrother_runtime": 45.4875,
1366
+ "eval_bigbrother_samples_per_second": 30.118,
1367
+ "eval_bigbrother_steps_per_second": 0.945,
1368
+ "eval_bigbrother_wer": 0.5227583984532345,
1369
+ "step": 3520
1370
+ }
1371
+ ],
1372
+ "logging_steps": 500,
1373
+ "max_steps": 3520,
1374
+ "num_input_tokens_seen": 0,
1375
+ "num_train_epochs": 40,
1376
+ "save_steps": 500,
1377
+ "total_flos": 1.468412648283622e+20,
1378
+ "train_batch_size": 48,
1379
+ "trial_name": null,
1380
+ "trial_params": null
1381
+ }
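The trainer state above logs, for every epoch, the training loss, gradient norm, learning rate and the evaluation loss/WER on three test sets (rundkast, nb_samtale, bigbrother); the best_metric of ~0.1461 is the nb_samtale WER reached at checkpoint-3432. A minimal sketch, assuming this file is checked out locally as checkpoint-3520/trainer_state.json, for pulling the per-epoch WER curves out of log_history:

import json

with open("checkpoint-3520/trainer_state.json") as f:
    state = json.load(f)

# epoch -> {dataset name: WER}
wer_by_epoch = {}
for entry in state["log_history"]:
    for key, value in entry.items():
        if key.startswith("eval_") and key.endswith("_wer"):
            dataset = key[len("eval_"):-len("_wer")]
            wer_by_epoch.setdefault(entry["epoch"], {})[dataset] = value

best_epoch = min(wer_by_epoch, key=lambda e: wer_by_epoch[e]["nb_samtale"])
print(best_epoch, wer_by_epoch[best_epoch])   # expected: epoch 39.0, nb_samtale WER ~0.1461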
checkpoint-3520/training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d398cf74e9c3dfb84e4b7970c10ef91e54973a6fa45689917df89793be64f273
3
+ size 4984
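The .bin and .safetensors entries in this commit are Git LFS pointers: the three lines stored in Git are only the pointer spec version, the SHA-256 of the real object and its size in bytes. A small sketch, assuming the actual objects have been fetched (e.g. with git lfs pull), for checking a local file against the pointer above:

import hashlib

def matches_lfs_pointer(path, oid, size):
    digest, nbytes = hashlib.sha256(), 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
            nbytes += len(chunk)
    return digest.hexdigest() == oid and nbytes == size

print(matches_lfs_pointer(
    "checkpoint-3520/training_args.bin",
    "d398cf74e9c3dfb84e4b7970c10ef91e54973a6fa45689917df89793be64f273",
    4984,
))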
config.json ADDED
@@ -0,0 +1,108 @@
1
+ {
2
+ "_name_or_path": "NbAiLab/nb-wav2vec2-1b-bokmaal",
3
+ "activation_dropout": 0.055,
4
+ "adapter_attn_dim": null,
5
+ "adapter_kernel_size": 3,
6
+ "adapter_stride": 2,
7
+ "add_adapter": false,
8
+ "apply_spec_augment": true,
9
+ "architectures": [
10
+ "Wav2Vec2ForCTC"
11
+ ],
12
+ "attention_dropout": 0.094,
13
+ "bos_token_id": 1,
14
+ "classifier_proj_size": 256,
15
+ "codevector_dim": 1024,
16
+ "contrastive_logits_temperature": 0.1,
17
+ "conv_bias": true,
18
+ "conv_dim": [
19
+ 512,
20
+ 512,
21
+ 512,
22
+ 512,
23
+ 512,
24
+ 512,
25
+ 512
26
+ ],
27
+ "conv_kernel": [
28
+ 10,
29
+ 3,
30
+ 3,
31
+ 3,
32
+ 3,
33
+ 2,
34
+ 2
35
+ ],
36
+ "conv_stride": [
37
+ 5,
38
+ 2,
39
+ 2,
40
+ 2,
41
+ 2,
42
+ 2,
43
+ 2
44
+ ],
45
+ "ctc_loss_reduction": "mean",
46
+ "ctc_zero_infinity": true,
47
+ "diversity_loss_weight": 0.1,
48
+ "do_stable_layer_norm": true,
49
+ "eos_token_id": 2,
50
+ "feat_extract_activation": "gelu",
51
+ "feat_extract_dropout": 0.0,
52
+ "feat_extract_norm": "layer",
53
+ "feat_proj_dropout": 0.04,
54
+ "feat_quantizer_dropout": 0.0,
55
+ "final_dropout": 0.0,
56
+ "hidden_act": "gelu",
57
+ "hidden_dropout": 0.047,
58
+ "hidden_size": 1280,
59
+ "initializer_range": 0.02,
60
+ "intermediate_size": 5120,
61
+ "layer_norm_eps": 1e-05,
62
+ "layerdrop": 0.041,
63
+ "mask_feature_length": 64,
64
+ "mask_feature_min_masks": 0,
65
+ "mask_feature_prob": 0.25,
66
+ "mask_time_length": 10,
67
+ "mask_time_min_masks": 2,
68
+ "mask_time_prob": 0.082,
69
+ "model_type": "wav2vec2",
70
+ "num_adapter_layers": 3,
71
+ "num_attention_heads": 16,
72
+ "num_codevector_groups": 2,
73
+ "num_codevectors_per_group": 320,
74
+ "num_conv_pos_embedding_groups": 16,
75
+ "num_conv_pos_embeddings": 128,
76
+ "num_feat_extract_layers": 7,
77
+ "num_hidden_layers": 48,
78
+ "num_negatives": 100,
79
+ "output_hidden_size": 1280,
80
+ "pad_token_id": 31,
81
+ "proj_codevector_dim": 1024,
82
+ "tdnn_dilation": [
83
+ 1,
84
+ 2,
85
+ 3,
86
+ 1,
87
+ 1
88
+ ],
89
+ "tdnn_dim": [
90
+ 512,
91
+ 512,
92
+ 512,
93
+ 512,
94
+ 1500
95
+ ],
96
+ "tdnn_kernel": [
97
+ 5,
98
+ 3,
99
+ 3,
100
+ 1,
101
+ 1
102
+ ],
103
+ "torch_dtype": "float32",
104
+ "transformers_version": "4.38.1",
105
+ "use_weighted_layer_sum": false,
106
+ "vocab_size": 34,
107
+ "xvector_output_dim": 512
108
+ }
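One practical consequence of the conv_stride values in this config: the seven feature-extractor convolutions downsample the 16 kHz waveform by a factor of 5*2*2*2*2*2*2 = 320, so the model emits one CTC frame per 20 ms of audio. A quick check in Python:

from math import prod

conv_stride = [5, 2, 2, 2, 2, 2, 2]       # from config.json above
hop = prod(conv_stride)                   # 320 samples between CTC frames
print(hop, 1000 * hop / 16000)            # -> 320 samples, 20.0 ms per frame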
language_model/5gram.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7b41c24c63f2f0585bea83666369593f3b3e6d047f327a90f36ebca2c35ef0ff
3
+ size 4243671427
language_model/attrs.json ADDED
@@ -0,0 +1 @@
1
+ {"alpha": 0.5, "beta": 0.1, "unk_score_offset": -10.0, "score_boundary": true}
language_model/unigrams.txt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ac3e71ca49838ca355df6fdcb8d89344a5a9bf9e1a76587cdf5df1367c19b9a9
3
+ size 16759269
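language_model/ holds the KenLM 5-gram binary, its unigram list and the decoder settings from attrs.json (alpha = 0.5, beta = 0.1, unk_score_offset = -10.0, score_boundary = true). A hedged sketch of rebuilding the beam-search decoder directly with pyctcdecode, assuming pyctcdecode and kenlm are installed and the repository is checked out locally; normally Wav2Vec2ProcessorWithLM does this for you, as in the processor sketch after preprocessor_config.json below.

import json
from pyctcdecode import build_ctcdecoder

with open("alphabet.json") as f:
    labels = json.load(f)["labels"]
with open("language_model/attrs.json") as f:
    attrs = json.load(f)

decoder = build_ctcdecoder(
    labels,
    kenlm_model_path="language_model/5gram.bin",
    alpha=attrs["alpha"],                       # LM weight
    beta=attrs["beta"],                         # word insertion bonus
    unk_score_offset=attrs["unk_score_offset"],
    lm_score_boundary=attrs["score_boundary"],
)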
model.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:20c77978e409c821f7a1bf59c121b9a47ae8192725878db239ae531f6db6ed82
3
+ size 3850265216
preprocessor_config.json ADDED
@@ -0,0 +1,10 @@
1
+ {
2
+ "do_normalize": true,
3
+ "feature_extractor_type": "Wav2Vec2FeatureExtractor",
4
+ "feature_size": 1,
5
+ "padding_side": "right",
6
+ "padding_value": 0,
7
+ "processor_class": "Wav2Vec2ProcessorWithLM",
8
+ "return_attention_mask": true,
9
+ "sampling_rate": 16000
10
+ }
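preprocessor_config.json pairs the 16 kHz Wav2Vec2 feature extractor with processor_class Wav2Vec2ProcessorWithLM, i.e. CTC decoding fused with the 5-gram LM above. A hedged end-to-end sketch, assuming torch, transformers, pyctcdecode and kenlm are installed and this repository is checked out in the working directory; a real 16 kHz mono recording would replace the zero waveform used here only to show the shapes.

import torch
from transformers import AutoProcessor, Wav2Vec2ForCTC

processor = AutoProcessor.from_pretrained(".")       # resolves to Wav2Vec2ProcessorWithLM
model = Wav2Vec2ForCTC.from_pretrained(".").eval()   # config.json + model.safetensors

speech = torch.zeros(16000).numpy()                  # placeholder: 1 s of silence at 16 kHz
inputs = processor(speech, sampling_rate=16000, return_tensors="pt")
with torch.no_grad():
    logits = model(inputs.input_values).logits       # (1, frames, vocab_size=34)
transcription = processor.batch_decode(logits.numpy()).text[0]
print(transcription)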
runs/Jun20_01-58-46_idun-05-07/events.out.tfevents.1718841722.idun-05-07.1642107.0 ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1b33325687bc8314beb4e4c54331c5545fe03c048f838caa7acdee90119f6efb
3
+ size 59287
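The events.out.tfevents file is the raw TensorBoard log for this run; once the LFS objects are fetched, the training and evaluation curves logged during training can be browsed by pointing TensorBoard at the runs/ directory (tensorboard --logdir runs).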
special_tokens_map.json ADDED
@@ -0,0 +1,6 @@
1
+ {
2
+ "bos_token": "<s>",
3
+ "eos_token": "</s>",
4
+ "pad_token": "[PAD]",
5
+ "unk_token": "[UNK]"
6
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,47 @@
1
+ {
2
+ "added_tokens_decoder": {
3
+ "30": {
4
+ "content": "[UNK]",
5
+ "lstrip": true,
6
+ "normalized": false,
7
+ "rstrip": true,
8
+ "single_word": false,
9
+ "special": false
10
+ },
11
+ "31": {
12
+ "content": "[PAD]",
13
+ "lstrip": true,
14
+ "normalized": false,
15
+ "rstrip": true,
16
+ "single_word": false,
17
+ "special": false
18
+ },
19
+ "32": {
20
+ "content": "<s>",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "33": {
28
+ "content": "</s>",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ }
35
+ },
36
+ "bos_token": "<s>",
37
+ "clean_up_tokenization_spaces": true,
38
+ "do_lower_case": false,
39
+ "eos_token": "</s>",
40
+ "model_max_length": 1000000000000000019884624838656,
41
+ "pad_token": "[PAD]",
42
+ "replace_word_delimiter_char": " ",
43
+ "target_lang": null,
44
+ "tokenizer_class": "Wav2Vec2CTCTokenizer",
45
+ "unk_token": "[UNK]",
46
+ "word_delimiter_token": "|"
47
+ }
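tokenizer_config.json defines a character-level CTC tokenizer: [UNK] and [PAD] are the unknown and padding (blank) tokens, and "|" stands in for the space between words. A minimal sketch, assuming transformers is installed and vocab.json from this commit is available locally:

from transformers import Wav2Vec2CTCTokenizer

tokenizer = Wav2Vec2CTCTokenizer(
    "vocab.json",
    unk_token="[UNK]",
    pad_token="[PAD]",
    word_delimiter_token="|",
)
ids = tokenizer("god dag").input_ids
print(ids)                    # character ids; 0 ("|") marks the word boundary
print(tokenizer.decode(ids))  # -> "god dag"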
training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d398cf74e9c3dfb84e4b7970c10ef91e54973a6fa45689917df89793be64f273
3
+ size 4984
vocab.json ADDED
@@ -0,0 +1,34 @@
1
+ {
2
+ "[PAD]": 31,
3
+ "[UNK]": 30,
4
+ "a": 1,
5
+ "b": 2,
6
+ "c": 3,
7
+ "d": 4,
8
+ "e": 5,
9
+ "f": 6,
10
+ "g": 7,
11
+ "h": 8,
12
+ "i": 9,
13
+ "j": 10,
14
+ "k": 11,
15
+ "l": 12,
16
+ "m": 13,
17
+ "n": 14,
18
+ "o": 15,
19
+ "p": 16,
20
+ "q": 17,
21
+ "r": 18,
22
+ "s": 19,
23
+ "t": 20,
24
+ "u": 21,
25
+ "v": 22,
26
+ "w": 23,
27
+ "x": 24,
28
+ "y": 25,
29
+ "z": 26,
30
+ "|": 0,
31
+ "å": 27,
32
+ "æ": 28,
33
+ "ø": 29
34
+ }
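vocab.json is the mapping the CTC head uses: 34 output classes, with "|" (id 0) as the word delimiter and [PAD] (id 31) doubling as the CTC blank, matching pad_token_id in config.json. A small sketch of greedy CTC decoding over a made-up sequence of frame-level argmax ids, just to show how repeats and blanks are handled:

import json

with open("vocab.json") as f:
    vocab = json.load(f)
id_to_char = {i: c for c, i in vocab.items()}

def greedy_ctc_decode(frame_ids, blank_id=31):
    chars, prev = [], None
    for i in frame_ids:
        if i != prev and i != blank_id:   # collapse repeats, then drop blanks
            chars.append(id_to_char[i])
        prev = i
    return "".join(chars).replace("|", " ").strip()

# frame-level argmax ids spelling "god dag", with repeats and blanks mixed in
print(greedy_ctc_decode([7, 7, 31, 15, 4, 0, 31, 4, 1, 1, 7]))   # -> "god dag"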