w11wo committed
Commit
e1be7ec
•
1 Parent(s): f636312

Training in progress, step 500

.gitignore ADDED
@@ -0,0 +1 @@
+ checkpoint-*/
added_tokens.json ADDED
@@ -0,0 +1 @@
+ {"<s>": 1205, "</s>": 1206}
config.json ADDED
@@ -0,0 +1,107 @@
+ {
+ "_name_or_path": "facebook/wav2vec2-xls-r-300m",
+ "activation_dropout": 0.1,
+ "adapter_kernel_size": 3,
+ "adapter_stride": 2,
+ "add_adapter": false,
+ "apply_spec_augment": true,
+ "architectures": [
+ "Wav2Vec2ForCTC"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "classifier_proj_size": 256,
+ "codevector_dim": 768,
+ "contrastive_logits_temperature": 0.1,
+ "conv_bias": true,
+ "conv_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512
+ ],
+ "conv_kernel": [
+ 10,
+ 3,
+ 3,
+ 3,
+ 3,
+ 2,
+ 2
+ ],
+ "conv_stride": [
+ 5,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2
+ ],
+ "ctc_loss_reduction": "mean",
+ "ctc_zero_infinity": false,
+ "diversity_loss_weight": 0.1,
+ "do_stable_layer_norm": true,
+ "eos_token_id": 2,
+ "feat_extract_activation": "gelu",
+ "feat_extract_dropout": 0.0,
+ "feat_extract_norm": "layer",
+ "feat_proj_dropout": 0.0,
+ "feat_quantizer_dropout": 0.0,
+ "final_dropout": 0.0,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 4096,
+ "layer_norm_eps": 1e-05,
+ "layerdrop": 0.0,
+ "mask_feature_length": 64,
+ "mask_feature_min_masks": 0,
+ "mask_feature_prob": 0.25,
+ "mask_time_length": 10,
+ "mask_time_min_masks": 2,
+ "mask_time_prob": 0.75,
+ "model_type": "wav2vec2",
+ "num_adapter_layers": 3,
+ "num_attention_heads": 16,
+ "num_codevector_groups": 2,
+ "num_codevectors_per_group": 320,
+ "num_conv_pos_embedding_groups": 16,
+ "num_conv_pos_embeddings": 128,
+ "num_feat_extract_layers": 7,
+ "num_hidden_layers": 24,
+ "num_negatives": 100,
+ "output_hidden_size": 1024,
+ "pad_token_id": 1204,
+ "proj_codevector_dim": 768,
+ "tdnn_dilation": [
+ 1,
+ 2,
+ 3,
+ 1,
+ 1
+ ],
+ "tdnn_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 1500
+ ],
+ "tdnn_kernel": [
+ 5,
+ 3,
+ 3,
+ 1,
+ 1
+ ],
+ "torch_dtype": "float32",
+ "transformers_version": "4.17.0.dev0",
+ "use_weighted_layer_sum": false,
+ "vocab_size": 1207,
+ "xvector_output_dim": 512
+ }
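A minimal sketch of loading this configuration and the matching CTC model, assuming the repo id `w11wo/wav2vec2-xls-r-300m-korean` named later in the training log (a local clone of this repo works the same way):

```python
from transformers import Wav2Vec2Config, Wav2Vec2ForCTC

config = Wav2Vec2Config.from_pretrained("w11wo/wav2vec2-xls-r-300m-korean")
assert config.vocab_size == 1207      # 1205-entry vocab + <s> + </s>
assert config.pad_token_id == 1204    # also serves as the CTC blank
assert config.mask_time_prob == 0.75  # aggressive SpecAugment for fine-tuning

model = Wav2Vec2ForCTC.from_pretrained("w11wo/wav2vec2-xls-r-300m-korean")
```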
nohup.out ADDED
@@ -0,0 +1,569 @@
  0%| | 0/1 [00:00<?, ?ba/s]
  0%| | 0/1 [00:00<?, ?ba/s]
  0%| | 0/23 [00:00<?, ?ba/s]
  0%| | 0/1 [00:00<?, ?ba/s]
  0%| | 0/34750 [00:00<?, ?it/s]
  0%| | 1/34750 [00:06<64:28:32, 6.68s/it]
[... per-step training progress through step 500, iteration time settling from ~6.7 s/it to ~1.9-4.6 s/it ...]
  1%|▏ | 500/34750 [26:31<17:50:56, 1.88s/it]The following columns in the evaluation set don't have a corresponding argument in `Wav2Vec2ForCTC.forward` and have been ignored: input_length.
  0%| | 0/57 [00:00<?, ?it/s]
[... evaluation progress through 56/57 at ~1.3-1.9 it/s ...]
  1%|▏ | 500/34750 [27:12<17:50:56, 1.88s/it]
 Saving model checkpoint to ./checkpoint-500
+ 01/31/2022 07:15:59 - WARNING - __main__ - Process rank: -1, device: cuda:0, n_gpu: 1, distributed training: False, 16-bits training: True
+ 01/31/2022 07:15:59 - INFO - __main__ - Training/evaluation parameters TrainingArguments(
+ _n_gpu=1,
+ adafactor=False,
+ adam_beta1=0.9,
+ adam_beta2=0.999,
+ adam_epsilon=1e-08,
+ bf16=False,
+ bf16_full_eval=False,
+ dataloader_drop_last=False,
+ dataloader_num_workers=0,
+ dataloader_pin_memory=True,
+ ddp_bucket_cap_mb=None,
+ ddp_find_unused_parameters=None,
+ debug=[],
+ deepspeed=None,
+ disable_tqdm=False,
+ do_eval=True,
+ do_predict=False,
+ do_train=True,
+ eval_accumulation_steps=None,
+ eval_steps=500,
+ evaluation_strategy=IntervalStrategy.STEPS,
+ fp16=True,
+ fp16_backend=auto,
+ fp16_full_eval=False,
+ fp16_opt_level=O1,
+ gradient_accumulation_steps=4,
+ gradient_checkpointing=True,
+ greater_is_better=None,
+ group_by_length=True,
+ half_precision_backend=auto,
+ hub_model_id=None,
+ hub_strategy=HubStrategy.EVERY_SAVE,
+ hub_token=<HUB_TOKEN>,
+ ignore_data_skip=False,
+ label_names=None,
+ label_smoothing_factor=0.0,
+ learning_rate=7.5e-05,
+ length_column_name=input_length,
+ load_best_model_at_end=False,
+ local_rank=-1,
+ log_level=-1,
+ log_level_replica=-1,
+ log_on_each_node=True,
+ logging_dir=./runs/Jan31_07-15-59_job-2c68f48a-2d5d-4013-9043-3f2cb25f3ff6,
+ logging_first_step=False,
+ logging_nan_inf_filter=True,
+ logging_steps=100,
+ logging_strategy=IntervalStrategy.STEPS,
+ lr_scheduler_type=SchedulerType.LINEAR,
+ max_grad_norm=1.0,
+ max_steps=-1,
+ metric_for_best_model=None,
+ mp_parameters=,
+ no_cuda=False,
+ num_train_epochs=50.0,
+ optim=OptimizerNames.ADAMW_HF,
+ output_dir=./,
+ overwrite_output_dir=True,
+ past_index=-1,
+ per_device_eval_batch_size=8,
+ per_device_train_batch_size=8,
+ prediction_loss_only=False,
+ push_to_hub=True,
+ push_to_hub_model_id=None,
+ push_to_hub_organization=None,
+ push_to_hub_token=<PUSH_TO_HUB_TOKEN>,
+ remove_unused_columns=True,
+ report_to=['tensorboard'],
+ resume_from_checkpoint=None,
+ run_name=./,
+ save_on_each_node=False,
+ save_steps=500,
+ save_strategy=IntervalStrategy.STEPS,
+ save_total_limit=3,
+ seed=42,
+ sharded_ddp=[],
+ skip_memory_metrics=True,
+ tf32=None,
+ tpu_metrics_debug=False,
+ tpu_num_cores=None,
+ use_legacy_prediction_loop=False,
+ warmup_ratio=0.0,
+ warmup_steps=2000,
+ weight_decay=0.0,
+ xpu_backend=None,
+ )
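Expressed in code, the non-default arguments above amount to roughly the following sketch (values copied from the log; this is not the training script itself):

```python
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="./",
    overwrite_output_dir=True,
    do_train=True,
    do_eval=True,
    evaluation_strategy="steps",
    eval_steps=500,
    save_steps=500,
    save_total_limit=3,
    logging_steps=100,
    learning_rate=7.5e-5,
    warmup_steps=2000,
    num_train_epochs=50,
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    gradient_accumulation_steps=4,
    gradient_checkpointing=True,
    group_by_length=True,               # length-bucketing over the column below
    length_column_name="input_length",
    fp16=True,
    push_to_hub=True,
    seed=42,
)
```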
+ 01/31/2022 07:16:01 - WARNING - datasets.builder - Reusing dataset zeroth_korean_asr (/workspace/.cache/huggingface/datasets/kresnik___zeroth_korean_asr/clean/1.0.1/f6cf96a53d5512525e3113bab8048d36ce268658d6e0c40d45f65dfa3f0bc343)
+ 01/31/2022 07:16:03 - WARNING - datasets.builder - Reusing dataset zeroth_korean_asr (/workspace/.cache/huggingface/datasets/kresnik___zeroth_korean_asr/clean/1.0.1/f6cf96a53d5512525e3113bab8048d36ce268658d6e0c40d45f65dfa3f0bc343)
+
+
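The cache path identifies the corpus as the `clean` configuration of `kresnik/zeroth_korean_asr` on the Hub. A hedged sketch of the load these warnings refer to (split names assumed to be the dataset's defaults):

```python
from datasets import load_dataset

# Later runs reuse the cached copy under ~/.cache/huggingface/datasets,
# which is exactly what the two "Reusing dataset" warnings above report.
train_ds = load_dataset("kresnik/zeroth_korean_asr", "clean", split="train")
test_ds = load_dataset("kresnik/zeroth_korean_asr", "clean", split="test")
```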
+ loading configuration file https://huggingface.co/facebook/wav2vec2-xls-r-300m/resolve/main/config.json from cache at /workspace/.cache/huggingface/transformers/dabc27df63e37bd2a7a221c7774e35f36a280fbdf917cf54cadfc7df8c786f6f.a3e4c3c967d9985881e0ae550a5f6f668f897db5ab2e0802f9b97973b15970e6
+ Model config Wav2Vec2Config {
+ "_name_or_path": "facebook/wav2vec2-xls-r-300m",
+ "activation_dropout": 0.0,
+ "adapter_kernel_size": 3,
+ "adapter_stride": 2,
+ "add_adapter": false,
+ "apply_spec_augment": true,
+ "architectures": [
+ "Wav2Vec2ForPreTraining"
+ ],
+ "attention_dropout": 0.1,
+ "bos_token_id": 1,
+ "classifier_proj_size": 256,
+ "codevector_dim": 768,
+ "contrastive_logits_temperature": 0.1,
+ "conv_bias": true,
+ "conv_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512
+ ],
+ "conv_kernel": [
+ 10,
+ 3,
+ 3,
+ 3,
+ 3,
+ 2,
+ 2
+ ],
+ "conv_stride": [
+ 5,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2
+ ],
+ "ctc_loss_reduction": "sum",
+ "ctc_zero_infinity": false,
+ "diversity_loss_weight": 0.1,
+ "do_stable_layer_norm": true,
+ "eos_token_id": 2,
+ "feat_extract_activation": "gelu",
+ "feat_extract_dropout": 0.0,
+ "feat_extract_norm": "layer",
+ "feat_proj_dropout": 0.1,
+ "feat_quantizer_dropout": 0.0,
+ "final_dropout": 0.0,
+ "gradient_checkpointing": false,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.1,
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 4096,
+ "layer_norm_eps": 1e-05,
+ "layerdrop": 0.1,
+ "mask_feature_length": 10,
+ "mask_feature_min_masks": 0,
+ "mask_feature_prob": 0.0,
+ "mask_time_length": 10,
+ "mask_time_min_masks": 2,
+ "mask_time_prob": 0.075,
+ "model_type": "wav2vec2",
+ "num_adapter_layers": 3,
+ "num_attention_heads": 16,
+ "num_codevector_groups": 2,
+ "num_codevectors_per_group": 320,
+ "num_conv_pos_embedding_groups": 16,
+ "num_conv_pos_embeddings": 128,
+ "num_feat_extract_layers": 7,
+ "num_hidden_layers": 24,
+ "num_negatives": 100,
+ "output_hidden_size": 1024,
+ "pad_token_id": 0,
+ "proj_codevector_dim": 768,
+ "tdnn_dilation": [
+ 1,
+ 2,
+ 3,
+ 1,
+ 1
+ ],
+ "tdnn_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 1500
+ ],
+ "tdnn_kernel": [
+ 5,
+ 3,
+ 3,
+ 1,
+ 1
+ ],
+ "torch_dtype": "float32",
+ "transformers_version": "4.17.0.dev0",
+ "use_weighted_layer_sum": false,
+ "vocab_size": 32,
+ "xvector_output_dim": 512
+ }
+
+
  0%| | 0/1 [00:00<?, ?ba/s]
+
  0%| | 0/1 [00:00<?, ?ba/s]
+ Didn't find file ./tokenizer_config.json. We won't load it.
+ Didn't find file ./added_tokens.json. We won't load it.
+ Didn't find file ./special_tokens_map.json. We won't load it.
+ Didn't find file ./tokenizer.json. We won't load it.
+ loading file ./vocab.json
+ loading file None
+ loading file None
+ loading file None
+ loading file None
+ file ./config.json not found
+ Adding <s> to the vocabulary
+ Adding </s> to the vocabulary
+ Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
+ loading configuration file https://huggingface.co/facebook/wav2vec2-xls-r-300m/resolve/main/config.json from cache at /workspace/.cache/huggingface/transformers/dabc27df63e37bd2a7a221c7774e35f36a280fbdf917cf54cadfc7df8c786f6f.a3e4c3c967d9985881e0ae550a5f6f668f897db5ab2e0802f9b97973b15970e6
+ Model config Wav2Vec2Config {
+ "_name_or_path": "facebook/wav2vec2-xls-r-300m",
+ "activation_dropout": 0.0,
+ "adapter_kernel_size": 3,
+ "adapter_stride": 2,
+ "add_adapter": false,
+ "apply_spec_augment": true,
+ "architectures": [
+ "Wav2Vec2ForPreTraining"
+ ],
+ "attention_dropout": 0.1,
+ "bos_token_id": 1,
+ "classifier_proj_size": 256,
+ "codevector_dim": 768,
+ "contrastive_logits_temperature": 0.1,
+ "conv_bias": true,
+ "conv_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512
+ ],
+ "conv_kernel": [
+ 10,
+ 3,
+ 3,
+ 3,
+ 3,
+ 2,
+ 2
+ ],
+ "conv_stride": [
+ 5,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2
+ ],
+ "ctc_loss_reduction": "sum",
+ "ctc_zero_infinity": false,
+ "diversity_loss_weight": 0.1,
+ "do_stable_layer_norm": true,
+ "eos_token_id": 2,
+ "feat_extract_activation": "gelu",
+ "feat_extract_dropout": 0.0,
+ "feat_extract_norm": "layer",
+ "feat_proj_dropout": 0.1,
+ "feat_quantizer_dropout": 0.0,
+ "final_dropout": 0.0,
+ "gradient_checkpointing": false,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.1,
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 4096,
+ "layer_norm_eps": 1e-05,
+ "layerdrop": 0.1,
+ "mask_feature_length": 10,
+ "mask_feature_min_masks": 0,
+ "mask_feature_prob": 0.0,
+ "mask_time_length": 10,
+ "mask_time_min_masks": 2,
+ "mask_time_prob": 0.075,
+ "model_type": "wav2vec2",
+ "num_adapter_layers": 3,
+ "num_attention_heads": 16,
+ "num_codevector_groups": 2,
+ "num_codevectors_per_group": 320,
+ "num_conv_pos_embedding_groups": 16,
+ "num_conv_pos_embeddings": 128,
+ "num_feat_extract_layers": 7,
+ "num_hidden_layers": 24,
+ "num_negatives": 100,
+ "output_hidden_size": 1024,
+ "pad_token_id": 0,
+ "proj_codevector_dim": 768,
+ "tdnn_dilation": [
+ 1,
+ 2,
+ 3,
+ 1,
+ 1
+ ],
+ "tdnn_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 1500
+ ],
+ "tdnn_kernel": [
+ 5,
+ 3,
+ 3,
+ 1,
+ 1
+ ],
+ "torch_dtype": "float32",
+ "transformers_version": "4.17.0.dev0",
+ "use_weighted_layer_sum": false,
+ "vocab_size": 32,
+ "xvector_output_dim": 512
+ }
+
+ loading feature extractor configuration file https://huggingface.co/facebook/wav2vec2-xls-r-300m/resolve/main/preprocessor_config.json from cache at /workspace/.cache/huggingface/transformers/6fb028b95b394059e7d3b367bbca2382b576c66aebe896f04d2cd34e1b575f5b.d4484dc1c81456a2461485e7168b04347a7b9a4e3b1ef3aba723323b33e12326
+ Feature extractor Wav2Vec2FeatureExtractor {
+ "do_normalize": true,
+ "feature_extractor_type": "Wav2Vec2FeatureExtractor",
+ "feature_size": 1,
+ "padding_side": "right",
+ "padding_value": 0,
+ "return_attention_mask": true,
+ "sampling_rate": 16000
+ }
+
+ loading weights file https://huggingface.co/facebook/wav2vec2-xls-r-300m/resolve/main/pytorch_model.bin from cache at /workspace/.cache/huggingface/transformers/1e6a6507f3b689035cd4b247e2a37c154e27f39143f31357a49b4e38baeccc36.1edb32803799e27ed554eb7dd935f6745b1a0b17b0ea256442fe24db6eb546cd
+ Some weights of the model checkpoint at facebook/wav2vec2-xls-r-300m were not used when initializing Wav2Vec2ForCTC: ['quantizer.weight_proj.bias', 'project_q.bias', 'quantizer.weight_proj.weight', 'project_hid.bias', 'project_q.weight', 'quantizer.codevectors', 'project_hid.weight']
+ - This IS expected if you are initializing Wav2Vec2ForCTC from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).
+ - This IS NOT expected if you are initializing Wav2Vec2ForCTC from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).
+ Some weights of Wav2Vec2ForCTC were not initialized from the model checkpoint at facebook/wav2vec2-xls-r-300m and are newly initialized: ['lm_head.bias', 'lm_head.weight']
+ You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.
+
+
+
  0%| | 0/23 [00:00<?, ?ba/s]
+
  0%| | 0/1 [00:00<?, ?ba/s]
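The unused-weights and newly-initialized-weights messages are the expected consequence of repurposing a `Wav2Vec2ForPreTraining` checkpoint for CTC: the quantizer and projection heads are dropped, and a fresh `lm_head` is created over the new vocabulary. A sketch of the equivalent call, with config overrides taken from the saved `config.json`:

```python
from transformers import Wav2Vec2ForCTC

# Dropping the pretraining heads and adding a randomly initialized lm_head
# is what triggers the two warnings above.
model = Wav2Vec2ForCTC.from_pretrained(
    "facebook/wav2vec2-xls-r-300m",
    vocab_size=1207,
    pad_token_id=1204,
    ctc_loss_reduction="mean",
)
print(model.lm_head)  # Linear(in_features=1024, out_features=1207, bias=True)
```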
+ Configuration saved in ./preprocessor_config.json
+ tokenizer config file saved in ./tokenizer_config.json
+ Special tokens file saved in ./special_tokens_map.json
+ added tokens file saved in ./added_tokens.json
+ Configuration saved in ./config.json
+ loading feature extractor configuration file ./preprocessor_config.json
+ loading configuration file ./config.json
+ Model config Wav2Vec2Config {
+ "_name_or_path": "./",
+ "activation_dropout": 0.1,
+ "adapter_kernel_size": 3,
+ "adapter_stride": 2,
+ "add_adapter": false,
+ "apply_spec_augment": true,
+ "architectures": [
+ "Wav2Vec2ForPreTraining"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "classifier_proj_size": 256,
+ "codevector_dim": 768,
+ "contrastive_logits_temperature": 0.1,
+ "conv_bias": true,
+ "conv_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512
+ ],
+ "conv_kernel": [
+ 10,
+ 3,
+ 3,
+ 3,
+ 3,
+ 2,
+ 2
+ ],
+ "conv_stride": [
+ 5,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2
+ ],
+ "ctc_loss_reduction": "mean",
+ "ctc_zero_infinity": false,
+ "diversity_loss_weight": 0.1,
+ "do_stable_layer_norm": true,
+ "eos_token_id": 2,
+ "feat_extract_activation": "gelu",
+ "feat_extract_dropout": 0.0,
+ "feat_extract_norm": "layer",
+ "feat_proj_dropout": 0.0,
+ "feat_quantizer_dropout": 0.0,
+ "final_dropout": 0.0,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.0,
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 4096,
+ "layer_norm_eps": 1e-05,
+ "layerdrop": 0.0,
+ "mask_feature_length": 64,
+ "mask_feature_min_masks": 0,
+ "mask_feature_prob": 0.25,
+ "mask_time_length": 10,
+ "mask_time_min_masks": 2,
+ "mask_time_prob": 0.75,
+ "model_type": "wav2vec2",
+ "num_adapter_layers": 3,
+ "num_attention_heads": 16,
+ "num_codevector_groups": 2,
+ "num_codevectors_per_group": 320,
+ "num_conv_pos_embedding_groups": 16,
+ "num_conv_pos_embeddings": 128,
+ "num_feat_extract_layers": 7,
+ "num_hidden_layers": 24,
+ "num_negatives": 100,
+ "output_hidden_size": 1024,
+ "pad_token_id": 1204,
+ "proj_codevector_dim": 768,
+ "tdnn_dilation": [
+ 1,
+ 2,
+ 3,
+ 1,
+ 1
+ ],
+ "tdnn_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 1500
+ ],
+ "tdnn_kernel": [
+ 5,
+ 3,
+ 3,
+ 1,
+ 1
+ ],
+ "torch_dtype": "float32",
+ "transformers_version": "4.17.0.dev0",
+ "use_weighted_layer_sum": false,
+ "vocab_size": 1207,
+ "xvector_output_dim": 512
+ }
+
+ loading feature extractor configuration file ./preprocessor_config.json
+ Feature extractor Wav2Vec2FeatureExtractor {
+ "do_normalize": true,
+ "feature_extractor_type": "Wav2Vec2FeatureExtractor",
+ "feature_size": 1,
+ "padding_side": "right",
+ "padding_value": 0,
+ "return_attention_mask": true,
+ "sampling_rate": 16000
+ }
+
+ Didn't find file ./tokenizer.json. We won't load it.
+ loading file ./vocab.json
+ loading file ./tokenizer_config.json
+ loading file ./added_tokens.json
+ loading file ./special_tokens_map.json
+ loading file None
+ Adding <s> to the vocabulary
+ Adding </s> to the vocabulary
+ /workspace/wav2vec2-xls-r-300m-korean/./ is already a clone of https://huggingface.co/w11wo/wav2vec2-xls-r-300m-korean. Make sure you pull the latest changes with `repo.git_pull()`.
+ 01/31/2022 07:18:18 - WARNING - huggingface_hub.repository - /workspace/wav2vec2-xls-r-300m-korean/./ is already a clone of https://huggingface.co/w11wo/wav2vec2-xls-r-300m-korean. Make sure you pull the latest changes with `repo.git_pull()`.
+ Using amp half precision backend
+ The following columns in the training set don't have a corresponding argument in `Wav2Vec2ForCTC.forward` and have been ignored: input_length.
+ /opt/conda/lib/python3.8/site-packages/transformers/optimization.py:306: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning
+ warnings.warn(
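The `FutureWarning` traces back to `optim=OptimizerNames.ADAMW_HF` in the arguments above. One way to silence it, assuming this transformers version's `optim` flag, is to opt into the torch implementation:

```python
from transformers import TrainingArguments

# Same run configuration, but with torch.optim.AdamW instead of the
# deprecated transformers implementation.
training_args = TrainingArguments(output_dir="./", optim="adamw_torch")
```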
+ ***** Running training *****
+ Num examples = 22262
+ Num Epochs = 50
+ Instantaneous batch size per device = 8
+ Total train batch size (w. parallel, distributed & accumulation) = 32
+ Gradient Accumulation steps = 4
+ Total optimization steps = 34750
+
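The step count follows from the numbers above: 22,262 examples at a per-device batch of 8 give ceil(22262 / 8) = 2,783 batches per epoch; with 4 gradient-accumulation steps that is 2783 // 4 = 695 optimizer updates per epoch, and 695 × 50 epochs = 34,750 total steps. The effective batch is 8 × 4 × 1 GPU = 32, matching the log. A quick check:

```python
import math

num_examples, per_device_batch, grad_accum, epochs = 22262, 8, 4, 50

batches_per_epoch = math.ceil(num_examples / per_device_batch)  # 2783
updates_per_epoch = batches_per_epoch // grad_accum             # 695
print(updates_per_epoch * epochs)                               # 34750
print(per_device_batch * grad_accum * 1)                        # 32 (1 GPU)
```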
  0%| | 0/34750 [00:00<?, ?it/s]
  0%| | 1/34750 [00:06<64:28:32, 6.68s/it]
[... per-step training progress through step 241 ...]
  1%| | 242/34750 [12:59<21:38:36, 2.26s/it]
748
  1%| | 243/34750 [13:01<21:15:14, 2.22s/it]
749
  1%| | 244/34750 [13:03<20:47:51, 2.17s/it]
750
  1%| | 245/34750 [13:05<20:23:43, 2.13s/it]
751
  1%| | 246/34750 [13:07<20:17:09, 2.12s/it]
752
  1%| | 247/34750 [13:09<19:45:30, 2.06s/it]
753
  1%| | 248/34750 [13:11<19:07:56, 2.00s/it]
754
  1%| | 249/34750 [13:13<18:36:53, 1.94s/it]
755
  1%| | 250/34750 [13:15<17:51:35, 1.86s/it]
756
  1%| | 251/34750 [13:21<31:21:56, 3.27s/it]
757
  1%| | 252/34750 [13:27<37:56:29, 3.96s/it]
758
  1%| | 253/34750 [13:32<41:27:01, 4.33s/it]
759
  1%| | 254/34750 [13:37<43:20:41, 4.52s/it]
760
  1%| | 255/34750 [13:42<43:46:29, 4.57s/it]
761
  1%| | 256/34750 [13:46<44:07:24, 4.60s/it]
762
  1%| | 257/34750 [13:51<43:48:40, 4.57s/it]
763
  1%| | 258/34750 [13:55<42:57:47, 4.48s/it]
764
  1%| | 259/34750 [13:59<42:16:34, 4.41s/it]
765
  1%| | 260/34750 [14:03<41:36:00, 4.34s/it]
766
  1%| | 261/34750 [14:07<40:22:55, 4.22s/it]
767
  1%| | 262/34750 [14:11<39:34:28, 4.13s/it]
768
  1%| | 263/34750 [14:15<38:38:45, 4.03s/it]
769
  1%| | 264/34750 [14:19<37:37:21, 3.93s/it]
770
  1%| | 265/34750 [14:23<37:05:51, 3.87s/it]
771
  1%| | 266/34750 [14:26<36:12:42, 3.78s/it]
772
  1%| | 267/34750 [14:30<35:15:32, 3.68s/it]
773
  1%| | 268/34750 [14:33<34:42:08, 3.62s/it]
774
  1%| | 269/34750 [14:36<34:05:26, 3.56s/it]
775
  1%| | 270/34750 [14:40<33:17:54, 3.48s/it]
776
  1%| | 271/34750 [14:43<32:24:37, 3.38s/it]
777
  1%| | 272/34750 [14:46<31:57:35, 3.34s/it]
778
  1%| | 273/34750 [14:49<31:33:17, 3.29s/it]
779
  1%| | 274/34750 [14:52<31:13:36, 3.26s/it]
780
  1%| | 275/34750 [14:56<30:33:40, 3.19s/it]
781
  1%| | 276/34750 [14:58<29:57:52, 3.13s/it]
782
  1%| | 277/34750 [15:01<29:24:24, 3.07s/it]
783
  1%| | 278/34750 [15:04<28:49:51, 3.01s/it]
784
  1%| | 279/34750 [15:07<28:08:27, 2.94s/it]
785
  1%| | 280/34750 [15:10<27:22:33, 2.86s/it]
786
  1%| | 281/34750 [15:12<26:57:28, 2.82s/it]
787
  1%| | 282/34750 [15:15<26:27:44, 2.76s/it]
788
  1%| | 283/34750 [15:18<26:13:36, 2.74s/it]
789
  1%| | 284/34750 [15:21<26:13:15, 2.74s/it]
790
  1%| | 285/34750 [15:23<25:57:40, 2.71s/it]
791
  1%| | 286/34750 [15:26<25:28:50, 2.66s/it]
792
  1%| | 287/34750 [15:28<25:04:44, 2.62s/it]
793
  1%| | 288/34750 [15:31<24:26:40, 2.55s/it]
794
  1%| | 289/34750 [15:33<23:41:08, 2.47s/it]
795
  1%| | 290/34750 [15:35<23:22:38, 2.44s/it]
796
  1%| | 291/34750 [15:38<22:57:12, 2.40s/it]
797
  1%| | 292/34750 [15:40<22:17:49, 2.33s/it]
798
  1%| | 293/34750 [15:42<21:41:20, 2.27s/it]
799
  1%| | 294/34750 [15:44<21:08:58, 2.21s/it]
800
  1%| | 295/34750 [15:46<20:50:26, 2.18s/it]
801
  1%| | 296/34750 [15:48<20:32:20, 2.15s/it]
802
  1%| | 297/34750 [15:50<19:59:15, 2.09s/it]
803
  1%| | 298/34750 [15:52<19:26:06, 2.03s/it]
804
  1%| | 299/34750 [15:54<18:49:19, 1.97s/it]
805
  1%| | 300/34750 [15:56<18:10:12, 1.90s/it]
806
 
807
  1%| | 300/34750 [15:56<18:10:12, 1.90s/it]
808
  1%| | 301/34750 [16:02<31:58:03, 3.34s/it]
809
  1%| | 302/34750 [16:08<37:55:08, 3.96s/it]
810
  1%| | 303/34750 [16:13<41:36:50, 4.35s/it]
811
  1%| | 304/34750 [16:18<43:17:31, 4.52s/it]
812
  1%| | 305/34750 [16:23<43:51:54, 4.58s/it]
813
  1%| | 306/34750 [16:27<43:56:28, 4.59s/it]
814
  1%| | 307/34750 [16:32<43:43:04, 4.57s/it]
815
  1%| | 308/34750 [16:36<42:48:58, 4.48s/it]
816
  1%| | 309/34750 [16:40<42:16:02, 4.42s/it]
817
  1%| | 310/34750 [16:44<41:16:33, 4.31s/it]
818
  1%| | 311/34750 [16:48<40:43:30, 4.26s/it]
819
  1%| | 312/34750 [16:52<39:52:16, 4.17s/it]
820
  1%| | 313/34750 [16:56<38:22:24, 4.01s/it]
821
  1%| | 314/34750 [17:00<37:28:22, 3.92s/it]
822
  1%| | 315/34750 [17:03<36:46:54, 3.85s/it]
823
  1%| | 316/34750 [17:07<35:58:05, 3.76s/it]
824
  1%| | 317/34750 [17:10<35:05:57, 3.67s/it]
825
  1%| | 318/34750 [17:14<34:07:17, 3.57s/it]
826
  1%| | 319/34750 [17:17<33:31:36, 3.51s/it]
827
  1%| | 320/34750 [17:20<32:47:49, 3.43s/it]
828
  1%| | 321/34750 [17:24<32:01:42, 3.35s/it]
829
  1%| | 322/34750 [17:27<31:36:22, 3.30s/it]
830
  1%| | 323/34750 [17:30<31:07:44, 3.26s/it]
831
  1%| | 324/34750 [17:33<30:53:20, 3.23s/it]
832
  1%| | 325/34750 [17:36<30:26:15, 3.18s/it]
833
  1%| | 326/34750 [17:39<29:47:46, 3.12s/it]
834
  1%| | 327/34750 [17:42<29:12:55, 3.06s/it]
835
  1%| | 328/34750 [17:45<28:47:06, 3.01s/it]
836
  1%| | 329/34750 [17:48<28:08:18, 2.94s/it]
837
  1%| | 330/34750 [17:50<27:36:09, 2.89s/it]
838
  1%| | 331/34750 [17:53<27:21:46, 2.86s/it]
839
  1%| | 332/34750 [17:56<27:03:08, 2.83s/it]
840
  1%| | 333/34750 [17:59<26:20:26, 2.76s/it]
841
  1%| | 334/34750 [18:01<26:12:57, 2.74s/it]
842
  1%| | 335/34750 [18:04<25:41:28, 2.69s/it]
843
  1%| | 336/34750 [18:06<25:12:54, 2.64s/it]
844
  1%| | 337/34750 [18:09<24:41:53, 2.58s/it]
845
  1%| | 338/34750 [18:11<24:12:43, 2.53s/it]
846
  1%| | 339/34750 [18:14<23:30:50, 2.46s/it]
847
  1%| | 340/34750 [18:16<22:57:39, 2.40s/it]
848
  1%| | 341/34750 [18:18<22:20:00, 2.34s/it]
849
  1%| | 342/34750 [18:20<22:25:17, 2.35s/it]
850
  1%| | 343/34750 [18:23<22:01:56, 2.31s/it]
851
  1%| | 344/34750 [18:25<21:44:05, 2.27s/it]
852
  1%| | 345/34750 [18:27<21:18:47, 2.23s/it]
853
  1%| | 346/34750 [18:29<20:40:52, 2.16s/it]
854
  1%| | 347/34750 [18:31<20:05:18, 2.10s/it]
855
  1%| | 348/34750 [18:33<19:18:53, 2.02s/it]
856
  1%| | 349/34750 [18:34<18:30:11, 1.94s/it]
857
  1%| | 350/34750 [18:36<17:49:48, 1.87s/it]
858
  1%| | 351/34750 [18:43<30:43:41, 3.22s/it]
859
  1%| | 352/34750 [18:48<36:25:36, 3.81s/it]
860
  1%| | 353/34750 [18:53<40:38:35, 4.25s/it]
861
  1%| | 354/34750 [18:58<42:21:14, 4.43s/it]
862
  1%| | 355/34750 [19:03<43:07:44, 4.51s/it]
863
  1%| | 356/34750 [19:07<42:26:41, 4.44s/it]
864
  1%| | 357/34750 [19:11<41:53:53, 4.39s/it]
865
  1%| | 358/34750 [19:15<40:39:51, 4.26s/it]
866
  1%| | 359/34750 [19:19<40:16:11, 4.22s/it]
867
  1%| | 360/34750 [19:23<39:41:45, 4.16s/it]
868
  1%| | 361/34750 [19:27<38:47:40, 4.06s/it]
869
  1%| | 362/34750 [19:31<38:03:09, 3.98s/it]
870
  1%| | 363/34750 [19:35<37:17:42, 3.90s/it]
871
  1%| | 364/34750 [19:38<36:50:26, 3.86s/it]
872
  1%| | 365/34750 [19:42<36:07:43, 3.78s/it]
873
  1%| | 366/34750 [19:45<35:02:58, 3.67s/it]
874
  1%| | 367/34750 [19:49<34:07:02, 3.57s/it]
875
  1%| | 368/34750 [19:52<33:13:38, 3.48s/it]
876
  1%| | 369/34750 [19:55<32:49:53, 3.44s/it]
877
  1%| | 370/34750 [19:58<32:19:00, 3.38s/it]
878
  1%| | 371/34750 [20:02<31:58:00, 3.35s/it]
879
  1%| | 372/34750 [20:05<31:12:15, 3.27s/it]
880
  1%| | 373/34750 [20:08<30:34:34, 3.20s/it]
881
  1%| | 374/34750 [20:11<30:28:31, 3.19s/it]
882
  1%| | 375/34750 [20:14<29:47:15, 3.12s/it]
883
  1%| | 376/34750 [20:17<29:13:07, 3.06s/it]
884
  1%| | 377/34750 [20:20<28:44:15, 3.01s/it]
885
  1%| | 378/34750 [20:23<28:26:34, 2.98s/it]
886
  1%| | 379/34750 [20:26<27:56:33, 2.93s/it]
887
  1%| | 380/34750 [20:28<27:25:39, 2.87s/it]
888
  1%| | 381/34750 [20:31<26:59:30, 2.83s/it]
889
  1%| | 382/34750 [20:34<26:29:51, 2.78s/it]
890
  1%| | 383/34750 [20:36<25:56:46, 2.72s/it]
891
  1%| | 384/34750 [20:39<25:35:53, 2.68s/it]
892
  1%| | 385/34750 [20:41<25:02:32, 2.62s/it]
893
  1%| | 386/34750 [20:44<24:39:09, 2.58s/it]
894
  1%| | 387/34750 [20:46<24:21:30, 2.55s/it]
895
  1%| | 388/34750 [20:49<23:38:31, 2.48s/it]
896
  1%| | 389/34750 [20:51<23:10:07, 2.43s/it]
897
  1%| | 390/34750 [20:53<22:38:55, 2.37s/it]
898
  1%| | 391/34750 [20:55<22:09:02, 2.32s/it]
899
  1%| | 392/34750 [20:58<21:49:26, 2.29s/it]
900
  1%| | 393/34750 [21:00<21:26:35, 2.25s/it]
901
  1%| | 394/34750 [21:02<21:03:47, 2.21s/it]
902
  1%| | 395/34750 [21:04<20:38:57, 2.16s/it]
903
  1%| | 396/34750 [21:06<20:07:32, 2.11s/it]
904
  1%| | 397/34750 [21:08<19:41:03, 2.06s/it]
905
  1%| | 398/34750 [21:10<19:14:02, 2.02s/it]
906
  1%| | 399/34750 [21:12<18:36:22, 1.95s/it]
907
  1%| | 400/34750 [21:13<17:49:36, 1.87s/it]
908
 
909
  1%| | 400/34750 [21:13<17:49:36, 1.87s/it]
910
  1%| | 401/34750 [21:20<31:30:35, 3.30s/it]
911
  1%| | 402/34750 [21:26<38:21:07, 4.02s/it]
912
  1%| | 403/34750 [21:31<41:14:36, 4.32s/it]
913
  1%| | 404/34750 [21:35<42:39:12, 4.47s/it]
914
  1%| | 405/34750 [21:40<43:39:49, 4.58s/it]
915
  1%| | 406/34750 [21:45<44:18:50, 4.65s/it]
916
  1%| | 407/34750 [21:49<43:34:28, 4.57s/it]
917
  1%| | 408/34750 [21:54<42:43:07, 4.48s/it]
918
  1%| | 409/34750 [21:58<42:01:38, 4.41s/it]
919
  1%| | 410/34750 [22:02<41:12:36, 4.32s/it]
920
  1%| | 411/34750 [22:06<40:08:54, 4.21s/it]
921
  1%| | 412/34750 [22:10<39:03:22, 4.09s/it]
922
  1%| | 413/34750 [22:14<38:34:32, 4.04s/it]
923
  1%| | 414/34750 [22:17<37:40:02, 3.95s/it]
924
  1%| | 415/34750 [22:21<36:37:15, 3.84s/it]
925
  1%| | 416/34750 [22:25<36:12:41, 3.80s/it]
926
  1%| | 417/34750 [22:28<35:24:02, 3.71s/it]
927
  1%| | 418/34750 [22:32<34:40:10, 3.64s/it]
928
  1%| | 419/34750 [22:35<33:57:25, 3.56s/it]
929
  1%| | 420/34750 [22:38<33:08:40, 3.48s/it]
930
  1%| | 421/34750 [22:42<32:18:45, 3.39s/it]
931
  1%| | 422/34750 [22:45<31:30:05, 3.30s/it]
932
  1%| | 423/34750 [22:48<30:52:25, 3.24s/it]
933
  1%| | 424/34750 [22:51<30:20:07, 3.18s/it]
934
  1%| | 425/34750 [22:54<30:16:08, 3.17s/it]
935
  1%| | 426/34750 [22:57<29:37:23, 3.11s/it]
936
  1%| | 427/34750 [23:00<29:02:17, 3.05s/it]
937
  1%| | 428/34750 [23:03<28:32:26, 2.99s/it]
938
  1%| | 429/34750 [23:05<27:55:36, 2.93s/it]
939
  1%| | 430/34750 [23:08<27:22:38, 2.87s/it]
940
  1%| | 431/34750 [23:11<26:42:52, 2.80s/it]
941
  1%| | 432/34750 [23:13<26:09:53, 2.74s/it]
942
  1%| | 433/34750 [23:16<25:33:50, 2.68s/it]
943
  1%| | 434/34750 [23:19<25:26:14, 2.67s/it]
944
  1%|โ– | 435/34750 [23:21<25:08:01, 2.64s/it]
945
  1%|โ– | 436/34750 [23:24<24:28:11, 2.57s/it]
946
  1%|โ– | 437/34750 [23:26<23:45:04, 2.49s/it]
947
  1%|โ– | 438/34750 [23:28<23:09:23, 2.43s/it]
948
  1%|โ– | 439/34750 [23:30<22:35:50, 2.37s/it]
949
  1%|โ– | 440/34750 [23:33<22:09:21, 2.32s/it]
950
  1%|โ– | 441/34750 [23:35<21:32:40, 2.26s/it]
951
  1%|โ– | 442/34750 [23:37<21:02:35, 2.21s/it]
952
  1%|โ– | 443/34750 [23:39<20:37:00, 2.16s/it]
953
  1%|โ– | 444/34750 [23:41<20:17:59, 2.13s/it]
954
  1%|โ– | 445/34750 [23:43<20:03:47, 2.11s/it]
955
  1%|โ– | 446/34750 [23:45<19:49:33, 2.08s/it]
956
  1%|โ– | 447/34750 [23:47<19:19:48, 2.03s/it]
957
  1%|โ– | 448/34750 [23:49<18:48:10, 1.97s/it]
958
  1%|โ– | 449/34750 [23:51<18:28:39, 1.94s/it]
959
  1%|โ– | 450/34750 [23:52<17:45:13, 1.86s/it]
960
  1%|โ– | 451/34750 [23:59<30:08:37, 3.16s/it]
961
  1%|โ– | 452/34750 [24:04<37:08:51, 3.90s/it]
962
  1%|โ– | 453/34750 [24:09<40:16:04, 4.23s/it]
963
  1%|โ– | 454/34750 [24:14<41:18:56, 4.34s/it]
964
  1%|โ– | 455/34750 [24:18<42:06:45, 4.42s/it]
965
  1%|โ– | 456/34750 [24:23<41:51:37, 4.39s/it]
966
  1%|โ– | 457/34750 [24:27<41:49:04, 4.39s/it]
967
  1%|โ– | 458/34750 [24:31<41:03:14, 4.31s/it]
968
  1%|โ– | 459/34750 [24:35<40:29:08, 4.25s/it]
969
  1%|โ– | 460/34750 [24:39<39:39:10, 4.16s/it]
970
  1%|โ– | 461/34750 [24:43<38:56:37, 4.09s/it]
971
  1%|โ– | 462/34750 [24:47<38:30:33, 4.04s/it]
972
  1%|โ– | 463/34750 [24:51<37:55:04, 3.98s/it]
973
  1%|โ– | 464/34750 [24:55<37:01:56, 3.89s/it]
974
  1%|โ– | 465/34750 [24:58<36:01:26, 3.78s/it]
975
  1%|โ– | 466/34750 [25:02<35:06:25, 3.69s/it]
976
  1%|โ– | 467/34750 [25:05<34:18:13, 3.60s/it]
977
  1%|โ– | 468/34750 [25:08<33:47:41, 3.55s/it]
978
  1%|โ– | 469/34750 [25:12<32:59:07, 3.46s/it]
979
  1%|โ– | 470/34750 [25:15<32:19:51, 3.40s/it]
980
  1%|โ– | 471/34750 [25:18<31:52:25, 3.35s/it]
981
  1%|โ– | 472/34750 [25:21<31:32:56, 3.31s/it]
982
  1%|โ– | 473/34750 [25:24<30:51:50, 3.24s/it]
983
  1%|โ– | 474/34750 [25:28<30:17:54, 3.18s/it]
984
  1%|โ– | 475/34750 [25:31<29:52:40, 3.14s/it]
985
  1%|โ– | 476/34750 [25:34<29:21:56, 3.08s/it]
986
  1%|โ– | 477/34750 [25:37<29:08:35, 3.06s/it]
987
  1%|โ– | 478/34750 [25:39<28:25:03, 2.99s/it]
988
  1%|โ– | 479/34750 [25:42<28:15:33, 2.97s/it]
989
  1%|โ– | 480/34750 [25:45<27:32:19, 2.89s/it]
990
  1%|โ– | 481/34750 [25:48<27:02:24, 2.84s/it]
991
  1%|โ– | 482/34750 [25:50<26:38:29, 2.80s/it]
992
  1%|โ– | 483/34750 [25:53<26:09:21, 2.75s/it]
993
  1%|โ– | 484/34750 [25:56<26:07:06, 2.74s/it]
994
  1%|โ– | 485/34750 [25:58<25:46:30, 2.71s/it]
995
  1%|โ– | 486/34750 [26:01<25:11:05, 2.65s/it]
996
  1%|โ– | 487/34750 [26:03<24:41:17, 2.59s/it]
997
  1%|โ– | 488/34750 [26:06<24:11:10, 2.54s/it]
998
  1%|โ– | 489/34750 [26:08<23:45:21, 2.50s/it]
999
  1%|โ– | 490/34750 [26:10<23:12:34, 2.44s/it]
1000
  1%|โ– | 491/34750 [26:13<22:40:29, 2.38s/it]
1001
  1%|โ– | 492/34750 [26:15<22:19:29, 2.35s/it]
1002
  1%|โ– | 493/34750 [26:17<22:03:05, 2.32s/it]
1003
  1%|โ– | 494/34750 [26:19<21:32:01, 2.26s/it]
1004
  1%|โ– | 495/34750 [26:21<21:00:30, 2.21s/it]
1005
  1%|โ– | 496/34750 [26:23<20:25:39, 2.15s/it]
1006
  1%|โ– | 497/34750 [26:25<19:55:06, 2.09s/it]
1007
  1%|โ– | 498/34750 [26:27<19:13:21, 2.02s/it]
1008
  1%|โ– | 499/34750 [26:29<18:38:20, 1.96s/it]
1009
  1%|โ– | 500/34750 [26:31<17:50:56, 1.88s/it]
1010
 
1011
  1%|โ– | 500/34750 [26:31<17:50:56, 1.88s/it]The following columns in the evaluation set don't have a corresponding argument in `Wav2Vec2ForCTC.forward` and have been ignored: input_length.
1012
+ ***** Running Evaluation *****
1013
+ Num examples = 456
1014
+ Batch size = 8
1015
+ {'loss': 47.2908, 'learning_rate': 3.675e-06, 'epoch': 0.14}
1016
+ {'loss': 33.9125, 'learning_rate': 7.425e-06, 'epoch': 0.29}
1017
+ {'loss': 26.6068, 'learning_rate': 1.1174999999999999e-05, 'epoch': 0.43}
1018
+ {'loss': 23.2775, 'learning_rate': 1.4925e-05, 'epoch': 0.57}
1019
+ {'loss': 19.7138, 'learning_rate': 1.8675e-05, 'epoch': 0.72}
1020
+
1021
+
1022
  0%| | 0/57 [00:00<?, ?it/s]
1023
+
1024
  4%|โ–Ž | 2/57 [00:00<00:19, 2.82it/s]
1025
+
1026
  5%|โ–Œ | 3/57 [00:01<00:25, 2.12it/s]
1027
+
1028
  7%|โ–‹ | 4/57 [00:02<00:28, 1.83it/s]
1029
+
1030
  9%|โ–‰ | 5/57 [00:02<00:28, 1.85it/s]
1031
+
1032
  11%|โ–ˆ | 6/57 [00:03<00:28, 1.78it/s]
1033
+
1034
  12%|โ–ˆโ– | 7/57 [00:03<00:28, 1.76it/s]
1035
+
1036
  14%|โ–ˆโ– | 8/57 [00:04<00:28, 1.72it/s]
1037
+
1038
  16%|โ–ˆโ–Œ | 9/57 [00:04<00:27, 1.72it/s]
1039
+
1040
  18%|โ–ˆโ–Š | 10/57 [00:05<00:26, 1.75it/s]
1041
+
1042
  19%|โ–ˆโ–‰ | 11/57 [00:06<00:27, 1.68it/s]
1043
+
1044
  21%|โ–ˆโ–ˆ | 12/57 [00:06<00:29, 1.53it/s]
1045
+
1046
  23%|โ–ˆโ–ˆโ–Ž | 13/57 [00:07<00:31, 1.42it/s]
1047
+
1048
  25%|โ–ˆโ–ˆโ– | 14/57 [00:08<00:29, 1.44it/s]
1049
+
1050
  26%|โ–ˆโ–ˆโ–‹ | 15/57 [00:09<00:31, 1.34it/s]
1051
+
1052
  28%|โ–ˆโ–ˆโ–Š | 16/57 [00:09<00:28, 1.43it/s]
1053
+
1054
  30%|โ–ˆโ–ˆโ–‰ | 17/57 [00:10<00:26, 1.50it/s]
1055
+
1056
  32%|โ–ˆโ–ˆโ–ˆโ– | 18/57 [00:11<00:24, 1.58it/s]
1057
+
1058
  33%|โ–ˆโ–ˆโ–ˆโ–Ž | 19/57 [00:11<00:23, 1.65it/s]
1059
+
1060
  35%|โ–ˆโ–ˆโ–ˆโ–Œ | 20/57 [00:12<00:22, 1.64it/s]
1061
+
1062
  37%|โ–ˆโ–ˆโ–ˆโ–‹ | 21/57 [00:12<00:21, 1.65it/s]
1063
+
1064
  39%|โ–ˆโ–ˆโ–ˆโ–Š | 22/57 [00:13<00:23, 1.52it/s]
1065
+
1066
  40%|โ–ˆโ–ˆโ–ˆโ–ˆ | 23/57 [00:14<00:24, 1.40it/s]
1067
+
1068
  42%|โ–ˆโ–ˆโ–ˆโ–ˆโ– | 24/57 [00:15<00:23, 1.42it/s]
1069
+
1070
  44%|โ–ˆโ–ˆโ–ˆโ–ˆโ– | 25/57 [00:15<00:21, 1.47it/s]
1071
+
1072
  46%|โ–ˆโ–ˆโ–ˆโ–ˆโ–Œ | 26/57 [00:16<00:19, 1.55it/s]
1073
+
1074
  47%|โ–ˆโ–ˆโ–ˆโ–ˆโ–‹ | 27/57 [00:16<00:18, 1.65it/s]
1075
+
1076
  49%|โ–ˆโ–ˆโ–ˆโ–ˆโ–‰ | 28/57 [00:17<00:18, 1.59it/s]
1077
+
1078
  51%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ | 29/57 [00:18<00:17, 1.57it/s]
1079
+
1080
  53%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Ž | 30/57 [00:18<00:15, 1.70it/s]
1081
+
1082
  54%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ– | 31/57 [00:19<00:14, 1.83it/s]
1083
+
1084
  56%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Œ | 32/57 [00:19<00:14, 1.78it/s]
1085
+
1086
  58%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Š | 33/57 [00:20<00:14, 1.66it/s]
1087
+
1088
  60%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‰ | 34/57 [00:20<00:14, 1.63it/s]
1089
+
1090
  61%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ– | 35/57 [00:21<00:14, 1.56it/s]
1091
+
1092
  63%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Ž | 36/57 [00:22<00:13, 1.57it/s]
1093
+
1094
  65%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ– | 37/57 [00:22<00:13, 1.53it/s]
1095
+
1096
  67%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‹ | 38/57 [00:23<00:12, 1.47it/s]
1097
+
1098
  68%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Š | 39/57 [00:24<00:12, 1.46it/s]
1099
+
1100
  70%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ | 40/57 [00:25<00:11, 1.43it/s]
1101
+
1102
  72%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ– | 41/57 [00:25<00:11, 1.37it/s]
1103
+
1104
  74%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Ž | 42/57 [00:26<00:11, 1.35it/s]
1105
+
1106
  75%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Œ | 43/57 [00:27<00:10, 1.39it/s]
1107
+
1108
  77%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‹ | 44/57 [00:28<00:09, 1.36it/s]
1109
+
1110
  79%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‰ | 45/57 [00:28<00:07, 1.54it/s]
1111
+
1112
  81%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ | 46/57 [00:29<00:06, 1.58it/s]
1113
+
1114
  82%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ– | 47/57 [00:29<00:06, 1.55it/s]
1115
+
1116
  84%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ– | 48/57 [00:30<00:05, 1.63it/s]
1117
+
1118
  86%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Œ | 49/57 [00:31<00:04, 1.67it/s]
1119
+
1120
  88%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Š | 50/57 [00:31<00:04, 1.67it/s]
1121
+
1122
  89%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‰ | 51/57 [00:32<00:03, 1.66it/s]
1123
+
1124
  91%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ | 52/57 [00:32<00:02, 1.69it/s]
1125
+
1126
  93%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Ž| 53/57 [00:33<00:02, 1.80it/s]
1127
+
1128
  95%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–| 54/57 [00:33<00:01, 1.77it/s]
1129
+
1130
  96%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‹| 55/57 [00:34<00:01, 1.61it/s]
1131
+
1132
  98%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Š| 56/57 [00:35<00:00, 1.56it/s]
1133
+
1134
 
1135
+
1136
 
1137
  1%|โ– | 500/34750 [27:12<17:50:56, 1.88s/it]
1138
+
1139
+
1140
  Saving model checkpoint to ./checkpoint-500
1141
+ Configuration saved in ./checkpoint-500/config.json
1142
+ Model weights saved in ./checkpoint-500/pytorch_model.bin
1143
+ Configuration saved in ./checkpoint-500/preprocessor_config.json
1144
+ Configuration saved in ./preprocessor_config.json
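
A quick sanity check on the evaluation numbers in the log (an illustrative Python snippet, not part of the committed files; every value is taken from the log above):

import math

num_eval_examples = 456   # "Num examples = 456" in the log
eval_batch_size = 8       # "Batch size = 8" in the log

# This is why the evaluation progress bar runs over 57 batches.
num_eval_batches = math.ceil(num_eval_examples / eval_batch_size)
print(num_eval_batches)  # 57
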
preprocessor_config.json ADDED
@@ -0,0 +1,9 @@
+ {
+ "do_normalize": true,
+ "feature_extractor_type": "Wav2Vec2FeatureExtractor",
+ "feature_size": 1,
+ "padding_side": "right",
+ "padding_value": 0,
+ "return_attention_mask": true,
+ "sampling_rate": 16000
+ }
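
Once saved, this preprocessor_config.json is what the feature extractor reads back via from_pretrained. A minimal sketch (illustrative, not part of the commit), loading from the training output directory, which run.sh sets to "./":

from transformers import Wav2Vec2FeatureExtractor

# Reads ./preprocessor_config.json from the training output directory.
feature_extractor = Wav2Vec2FeatureExtractor.from_pretrained("./")
assert feature_extractor.sampling_rate == 16000   # matches the config above
assert feature_extractor.do_normalize is True
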
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:95d95792c6081f64b83bc4a1b3c5f062d412822f9a3fa8e9f2bc12785dd1634d
+ size 1266872433
run.sh ADDED
@@ -0,0 +1,34 @@
+ python run_speech_recognition_ctc.py \
+     --dataset_name="kresnik/zeroth_korean" \
+     --model_name_or_path="facebook/wav2vec2-xls-r-300m" \
+     --dataset_config_name="clean" \
+     --output_dir="./" \
+     --overwrite_output_dir \
+     --num_train_epochs="50" \
+     --per_device_train_batch_size="8" \
+     --per_device_eval_batch_size="8" \
+     --gradient_accumulation_steps="4" \
+     --learning_rate="7.5e-5" \
+     --warmup_steps="2000" \
+     --length_column_name="input_length" \
+     --evaluation_strategy="steps" \
+     --text_column_name="text" \
+     --chars_to_ignore , ? . ! \- \; \: \" โ€œ % โ€˜ โ€ ๏ฟฝ โ€” โ€™ โ€ฆ โ€“ \
+     --save_steps="500" \
+     --eval_steps="500" \
+     --logging_steps="100" \
+     --layerdrop="0.0" \
+     --activation_dropout="0.1" \
+     --save_total_limit="3" \
+     --freeze_feature_encoder \
+     --feat_proj_dropout="0.0" \
+     --mask_time_prob="0.75" \
+     --mask_time_length="10" \
+     --mask_feature_prob="0.25" \
+     --mask_feature_length="64" \
+     --gradient_checkpointing \
+     --use_auth_token \
+     --fp16 \
+     --group_by_length \
+     --do_train --do_eval \
+     --push_to_hub
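
A back-of-the-envelope check of the schedule these flags imply (an illustrative snippet, not part of the commit; it assumes a single GPU, which the log does not confirm):

# 8 samples per device x 4 gradient accumulation steps = 32 samples per optimizer step
effective_batch = 8 * 4

# The tqdm bar in nohup.out reports 34,750 total optimizer steps for 50 epochs,
# i.e. 695 steps per epoch, or roughly 22,240 training samples after length filtering.
steps_per_epoch = 34750 // 50
print(steps_per_epoch, steps_per_epoch * effective_batch)  # 695 22240
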
run_speech_recognition_ctc.py ADDED
@@ -0,0 +1,829 @@
+ #!/usr/bin/env python
+ # coding=utf-8
+ # Copyright 2021 The HuggingFace Inc. team. All rights reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ """ Fine-tuning a ๐Ÿค— Transformers CTC model for automatic speech recognition"""
+
+ import functools
+ import json
+ import logging
+ import os
+ import re
+ import sys
+ import warnings
+ from dataclasses import dataclass, field
+ from typing import Dict, List, Optional, Union
+
+ import datasets
+ import numpy as np
+ import torch
+ from datasets import DatasetDict, load_dataset, load_metric
+
+ import transformers
+ from transformers import (
+     AutoConfig,
+     AutoFeatureExtractor,
+     AutoModelForCTC,
+     AutoProcessor,
+     AutoTokenizer,
+     HfArgumentParser,
+     Trainer,
+     TrainingArguments,
+     Wav2Vec2Processor,
+     set_seed,
+ )
+ from transformers.trainer_utils import get_last_checkpoint, is_main_process
+ from transformers.utils import check_min_version
+ from transformers.utils.versions import require_version
+
+
+ # Will error if the minimal version of Transformers is not installed. Remove at your own risks.
+ check_min_version("4.17.0.dev0")
+
+ require_version(
+     "datasets>=1.13.3",
+     "To fix: pip install -r examples/pytorch/text-classification/requirements.txt",
+ )
+
+
+ logger = logging.getLogger(__name__)
+
+
+ def list_field(default=None, metadata=None):
+     return field(default_factory=lambda: default, metadata=metadata)
+
+
+ @dataclass
+ class ModelArguments:
+     """
+     Arguments pertaining to which model/config/tokenizer we are going to fine-tune from.
+     """
+
+     model_name_or_path: str = field(
+         metadata={
+             "help": "Path to pretrained model or model identifier from huggingface.co/models"
+         }
+     )
+     tokenizer_name_or_path: Optional[str] = field(
+         default=None,
+         metadata={
+             "help": "Path to pretrained tokenizer or tokenizer identifier from huggingface.co/models"
+         },
+     )
+     cache_dir: Optional[str] = field(
+         default=None,
+         metadata={
+             "help": "Where do you want to store the pretrained models downloaded from huggingface.co"
+         },
+     )
+     freeze_feature_encoder: bool = field(
+         default=True,
+         metadata={"help": "Whether to freeze the feature encoder layers of the model."},
+     )
+     attention_dropout: float = field(
+         default=0.0,
+         metadata={"help": "The dropout ratio for the attention probabilities."},
+     )
+     activation_dropout: float = field(
+         default=0.0,
+         metadata={
+             "help": "The dropout ratio for activations inside the fully connected layer."
+         },
+     )
+     feat_proj_dropout: float = field(
+         default=0.0, metadata={"help": "The dropout ratio for the projected features."}
+     )
+     hidden_dropout: float = field(
+         default=0.0,
+         metadata={
+             "help": "The dropout probability for all fully connected layers in the embeddings, encoder, and pooler."
+         },
+     )
+     final_dropout: float = field(
+         default=0.0,
+         metadata={"help": "The dropout probability for the final projection layer."},
+     )
+     mask_time_prob: float = field(
+         default=0.05,
+         metadata={
+             "help": "Probability of each feature vector along the time axis to be chosen as the start of the vector "
+             "span to be masked. Approximately ``mask_time_prob * sequence_length // mask_time_length`` feature "
+             "vectors will be masked along the time axis."
+         },
+     )
+     mask_time_length: int = field(
+         default=10,
+         metadata={"help": "Length of vector span to mask along the time axis."},
+     )
+     mask_feature_prob: float = field(
+         default=0.0,
+         metadata={
+             "help": "Probability of each feature vector along the feature axis to be chosen as the start of the vector "
+             "span to be masked. Approximately ``mask_feature_prob * sequence_length // mask_feature_length`` feature bins will be masked along the time axis."
+         },
+     )
+     mask_feature_length: int = field(
+         default=10,
+         metadata={"help": "Length of vector span to mask along the feature axis."},
+     )
+     layerdrop: float = field(
+         default=0.0, metadata={"help": "The LayerDrop probability."}
+     )
+     ctc_loss_reduction: Optional[str] = field(
+         default="mean",
+         metadata={
+             "help": "The way the ctc loss should be reduced. Should be one of 'mean' or 'sum'."
+         },
+     )
+
+
+ @dataclass
+ class DataTrainingArguments:
+     """
+     Arguments pertaining to what data we are going to input our model for training and eval.
+
+     Using `HfArgumentParser` we can turn this class
+     into argparse arguments to be able to specify them on
+     the command line.
+     """
+
+     dataset_name: str = field(
+         metadata={
+             "help": "The name of the dataset to use (via the datasets library)."
+         }
+     )
+     dataset_config_name: str = field(
+         default=None,
+         metadata={
+             "help": "The configuration name of the dataset to use (via the datasets library)."
+         },
+     )
+     train_split_name: str = field(
+         default="train",
+         metadata={
+             "help": "The name of the training data set split to use (via the datasets library). Defaults to 'train'"
+         },
+     )
+     eval_split_name: str = field(
+         default="test",
+         metadata={
+             "help": "The name of the evaluation data set split to use (via the datasets library). Defaults to 'test'"
+         },
+     )
+     audio_column_name: str = field(
+         default="audio",
+         metadata={
+             "help": "The name of the dataset column containing the audio data. Defaults to 'audio'"
+         },
+     )
+     text_column_name: str = field(
+         default="text",
+         metadata={
+             "help": "The name of the dataset column containing the text data. Defaults to 'text'"
+         },
+     )
+     overwrite_cache: bool = field(
+         default=False,
+         metadata={"help": "Overwrite the cached preprocessed datasets or not."},
+     )
+     preprocessing_num_workers: Optional[int] = field(
+         default=None,
+         metadata={"help": "The number of processes to use for the preprocessing."},
+     )
+     max_train_samples: Optional[int] = field(
+         default=None,
+         metadata={
+             "help": "For debugging purposes or quicker training, truncate the number of training examples to this "
+             "value if set."
+         },
+     )
+     max_eval_samples: Optional[int] = field(
+         default=None,
+         metadata={
+             "help": "For debugging purposes or quicker training, truncate the number of validation examples to this "
+             "value if set."
+         },
+     )
+     chars_to_ignore: Optional[List[str]] = list_field(
+         default=None,
+         metadata={"help": "A list of characters to remove from the transcripts."},
+     )
+     eval_metrics: List[str] = list_field(
+         default=["wer", "cer"],
+         metadata={
+             "help": "A list of metrics the model should be evaluated on. E.g. `'wer cer'`"
+         },
+     )
+     max_duration_in_seconds: float = field(
+         default=20.0,
+         metadata={
+             "help": "Filter audio files that are longer than `max_duration_in_seconds` seconds to `max_duration_in_seconds`"
+         },
+     )
+     min_duration_in_seconds: float = field(
+         default=0.0,
+         metadata={
+             "help": "Filter audio files that are shorter than `min_duration_in_seconds` seconds"
+         },
+     )
+     preprocessing_only: bool = field(
+         default=False,
+         metadata={
+             "help": "Whether to only do data preprocessing and skip training. "
+             "This is especially useful when data preprocessing errors out in distributed training due to timeout. "
+             "In this case, one should run the preprocessing in a non-distributed setup with `preprocessing_only=True` "
+             "so that the cached datasets can consequently be loaded in distributed training"
+         },
+     )
+     use_auth_token: bool = field(
+         default=False,
+         metadata={
+             "help": "If :obj:`True`, will use the token generated when running "
+             ":obj:`transformers-cli login` as HTTP bearer authorization for remote files."
+         },
+     )
+     unk_token: str = field(
+         default="[UNK]", metadata={"help": "The unk token for the tokenizer"},
+     )
+     pad_token: str = field(
+         default="[PAD]", metadata={"help": "The padding token for the tokenizer"},
+     )
+     word_delimiter_token: str = field(
+         default="|", metadata={"help": "The word delimiter token for the tokenizer"},
+     )
+     phoneme_language: Optional[str] = field(
+         default=None,
+         metadata={
+             "help": "The target language that should be passed to the tokenizer"
+             " for tokenization. Note that this is only relevant if the model"
+             " classifies the input audio to a sequence of phoneme sequences."
+         },
+     )
+
+
+ @dataclass
+ class DataCollatorCTCWithPadding:
+     """
+     Data collator that will dynamically pad the inputs received.
+     Args:
+         processor (:class:`~transformers.AutoProcessor`)
+             The processor used for processing the data.
+         padding (:obj:`bool`, :obj:`str` or :class:`~transformers.tokenization_utils_base.PaddingStrategy`, `optional`, defaults to :obj:`True`):
+             Select a strategy to pad the returned sequences (according to the model's padding side and padding index)
+             among:
+             * :obj:`True` or :obj:`'longest'`: Pad to the longest sequence in the batch (or no padding if only a single
+               sequence is provided).
+             * :obj:`'max_length'`: Pad to a maximum length specified with the argument :obj:`max_length` or to the
+               maximum acceptable input length for the model if that argument is not provided.
+             * :obj:`False` or :obj:`'do_not_pad'` (default): No padding (i.e., can output a batch with sequences of
+               different lengths).
+         max_length (:obj:`int`, `optional`):
+             Maximum length of the ``input_values`` of the returned list and optionally padding length (see above).
+         max_length_labels (:obj:`int`, `optional`):
+             Maximum length of the ``labels`` returned list and optionally padding length (see above).
+         pad_to_multiple_of (:obj:`int`, `optional`):
+             If set will pad the sequence to a multiple of the provided value.
+             This is especially useful to enable the use of Tensor Cores on NVIDIA hardware with compute capability >=
+             7.5 (Volta).
+     """
+
+     processor: AutoProcessor
+     padding: Union[bool, str] = "longest"
+     pad_to_multiple_of: Optional[int] = None
+     pad_to_multiple_of_labels: Optional[int] = None
+
+     def __call__(
+         self, features: List[Dict[str, Union[List[int], torch.Tensor]]]
+     ) -> Dict[str, torch.Tensor]:
+         # split inputs and labels since they have to be of different lengths and need
+         # different padding methods
+         input_features = [
+             {"input_values": feature["input_values"]} for feature in features
+         ]
+         label_features = [{"input_ids": feature["labels"]} for feature in features]
+
+         batch = self.processor.pad(
+             input_features,
+             padding=self.padding,
+             pad_to_multiple_of=self.pad_to_multiple_of,
+             return_tensors="pt",
+         )
+
+         with self.processor.as_target_processor():
+             labels_batch = self.processor.pad(
+                 label_features,
+                 padding=self.padding,
+                 pad_to_multiple_of=self.pad_to_multiple_of_labels,
+                 return_tensors="pt",
+             )
+
+         # replace padding with -100 to ignore loss correctly
+         labels = labels_batch["input_ids"].masked_fill(
+             labels_batch.attention_mask.ne(1), -100
+         )
+
+         batch["labels"] = labels
+
+         return batch
+
+
+ def create_vocabulary_from_data(
+     datasets: DatasetDict,
+     word_delimiter_token: Optional[str] = None,
+     unk_token: Optional[str] = None,
+     pad_token: Optional[str] = None,
+ ):
+     # Given training and test labels create vocabulary
+     def extract_all_chars(batch):
+         all_text = " ".join(batch["target_text"])
+         vocab = list(set(all_text))
+         return {"vocab": [vocab], "all_text": [all_text]}
+
+     vocabs = datasets.map(
+         extract_all_chars,
+         batched=True,
+         batch_size=-1,
+         keep_in_memory=True,
+         remove_columns=datasets["train"].column_names,
+     )
+
+     # take union of all unique characters in each dataset
+     vocab_set = functools.reduce(
+         lambda vocab_1, vocab_2: set(vocab_1["vocab"][0]) | set(vocab_2["vocab"][0]),
+         vocabs.values(),
+     )
+
+     vocab_dict = {v: k for k, v in enumerate(sorted(list(vocab_set)))}
+
+     # replace white space with delimiter token
+     if word_delimiter_token is not None:
+         vocab_dict[word_delimiter_token] = vocab_dict[" "]
+         del vocab_dict[" "]
+
+     # add unk and pad token
+     if unk_token is not None:
+         vocab_dict[unk_token] = len(vocab_dict)
+
+     if pad_token is not None:
+         vocab_dict[pad_token] = len(vocab_dict)
+
+     return vocab_dict
+
+
+ def main():
+     # See all possible arguments in src/transformers/training_args.py
+     # or by passing the --help flag to this script.
+     # We now keep distinct sets of args, for a cleaner separation of concerns.
+
+     parser = HfArgumentParser(
+         (ModelArguments, DataTrainingArguments, TrainingArguments)
+     )
+     if len(sys.argv) == 2 and sys.argv[1].endswith(".json"):
+         # If we pass only one argument to the script and it's the path to a json file,
+         # let's parse it to get our arguments.
+         model_args, data_args, training_args = parser.parse_json_file(
+             json_file=os.path.abspath(sys.argv[1])
+         )
+     else:
+         model_args, data_args, training_args = parser.parse_args_into_dataclasses()
+
+     # Detecting last checkpoint.
+     last_checkpoint = None
+     if (
+         os.path.isdir(training_args.output_dir)
+         and training_args.do_train
+         and not training_args.overwrite_output_dir
+     ):
+         last_checkpoint = get_last_checkpoint(training_args.output_dir)
+         if last_checkpoint is None and len(os.listdir(training_args.output_dir)) > 0:
+             raise ValueError(
+                 f"Output directory ({training_args.output_dir}) already exists and is not empty. "
+                 "Use --overwrite_output_dir to overcome."
+             )
+         elif last_checkpoint is not None:
+             logger.info(
+                 f"Checkpoint detected, resuming training at {last_checkpoint}. To avoid this behavior, change "
+                 "the `--output_dir` or add `--overwrite_output_dir` to train from scratch."
+             )
+
+     # Setup logging
+     logging.basicConfig(
+         format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
+         datefmt="%m/%d/%Y %H:%M:%S",
+         handlers=[logging.StreamHandler(sys.stdout)],
+     )
+     logger.setLevel(
+         logging.INFO if is_main_process(training_args.local_rank) else logging.WARN
+     )
+
+     # Log on each process the small summary:
+     logger.warning(
+         f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu} "
+         f"distributed training: {bool(training_args.local_rank != -1)}, 16-bits training: {training_args.fp16}"
+     )
+     # Set the verbosity to info of the Transformers logger (on main process only):
+     if is_main_process(training_args.local_rank):
+         transformers.utils.logging.set_verbosity_info()
+     logger.info("Training/evaluation parameters %s", training_args)
+
+     # Set seed before initializing model.
+     set_seed(training_args.seed)
+
+     # 1. First, let's load the dataset
+     raw_datasets = DatasetDict()
+
+     if training_args.do_train:
+         raw_datasets["train"] = load_dataset(
+             data_args.dataset_name,
+             data_args.dataset_config_name,
+             split=data_args.train_split_name,
+             use_auth_token=data_args.use_auth_token,
+         )
+
+         if data_args.audio_column_name not in raw_datasets["train"].column_names:
+             raise ValueError(
+                 f"--audio_column_name '{data_args.audio_column_name}' not found in dataset '{data_args.dataset_name}'. "
+                 "Make sure to set `--audio_column_name` to the correct audio column - one of "
+                 f"{', '.join(raw_datasets['train'].column_names)}."
+             )
+
+         if data_args.text_column_name not in raw_datasets["train"].column_names:
+             raise ValueError(
+                 f"--text_column_name {data_args.text_column_name} not found in dataset '{data_args.dataset_name}'. "
+                 "Make sure to set `--text_column_name` to the correct text column - one of "
+                 f"{', '.join(raw_datasets['train'].column_names)}."
+             )
+
+         if data_args.max_train_samples is not None:
+             raw_datasets["train"] = raw_datasets["train"].select(
+                 range(data_args.max_train_samples)
+             )
+
+     if training_args.do_eval:
+         raw_datasets["eval"] = load_dataset(
+             data_args.dataset_name,
+             data_args.dataset_config_name,
+             split=data_args.eval_split_name,
+             use_auth_token=data_args.use_auth_token,
+         )
+
+         if data_args.max_eval_samples is not None:
+             raw_datasets["eval"] = raw_datasets["eval"].select(
+                 range(data_args.max_eval_samples)
+             )
+
+     # 2. We remove some special characters from the datasets
+     # that make training complicated and do not help in transcribing the speech
+     # E.g. characters, such as `,` and `.` do not really have an acoustic characteristic
+     # that could be easily picked up by the model
+     chars_to_ignore_regex = (
+         f'[{"".join(data_args.chars_to_ignore)}]'
+         if data_args.chars_to_ignore is not None
+         else None
+     )
+     text_column_name = data_args.text_column_name
+
+     def remove_special_characters(batch):
+         if chars_to_ignore_regex is not None:
+             batch["target_text"] = (
+                 re.sub(chars_to_ignore_regex, "", batch[text_column_name]).lower() + " "
+             )
+         else:
+             batch["target_text"] = batch[text_column_name].lower() + " "
+         return batch
+
+     with training_args.main_process_first(
+         desc="dataset map special characters removal"
+     ):
+         raw_datasets = raw_datasets.map(
+             remove_special_characters,
+             remove_columns=[text_column_name],
+             desc="remove special characters from datasets",
+         )
+
+     # save special tokens for tokenizer
+     word_delimiter_token = data_args.word_delimiter_token
+     unk_token = data_args.unk_token
+     pad_token = data_args.pad_token
+
+     # 3. Next, let's load the config as we might need it to create
+     # the tokenizer
+     # load config
+     config = AutoConfig.from_pretrained(
+         model_args.model_name_or_path,
+         cache_dir=model_args.cache_dir,
+         use_auth_token=data_args.use_auth_token,
+     )
+
+     # 4. Next, if no tokenizer file is defined,
+     # we create the vocabulary of the model by extracting all unique characters from
+     # the training and evaluation datasets
+     # We need to make sure that only first rank saves vocabulary
+     # make sure all processes wait until vocab is created
+     tokenizer_name_or_path = model_args.tokenizer_name_or_path
+     tokenizer_kwargs = {}
+     if tokenizer_name_or_path is None:
+         # save vocab in training output dir
+         tokenizer_name_or_path = training_args.output_dir
+
+         vocab_file = os.path.join(tokenizer_name_or_path, "vocab.json")
+
+         with training_args.main_process_first():
+             if training_args.overwrite_output_dir and os.path.isfile(vocab_file):
+                 os.remove(vocab_file)
+
+         with training_args.main_process_first(desc="dataset map vocabulary creation"):
+             if not os.path.isfile(vocab_file):
+                 os.makedirs(tokenizer_name_or_path, exist_ok=True)
+                 vocab_dict = create_vocabulary_from_data(
+                     raw_datasets,
+                     word_delimiter_token=word_delimiter_token,
+                     unk_token=unk_token,
+                     pad_token=pad_token,
+                 )
+
+                 # save vocab dict to be loaded into tokenizer
+                 with open(vocab_file, "w") as file:
+                     json.dump(vocab_dict, file)
+
+         # if tokenizer has just been created
+         # it is defined by `tokenizer_class` if present in config else by `model_type`
+         tokenizer_kwargs = {
+             "config": config if config.tokenizer_class is not None else None,
+             "tokenizer_type": config.model_type
+             if config.tokenizer_class is None
+             else None,
+             "unk_token": unk_token,
+             "pad_token": pad_token,
+             "word_delimiter_token": word_delimiter_token,
+         }
+
+     # 5. Now we can instantiate the feature extractor, tokenizer and model
+     # Note for distributed training, the .from_pretrained methods guarantee that only
+     # one local process can concurrently download model & vocab.
+
+     # load feature_extractor and tokenizer
+     tokenizer = AutoTokenizer.from_pretrained(
+         tokenizer_name_or_path,
+         use_auth_token=data_args.use_auth_token,
+         **tokenizer_kwargs,
+     )
+     feature_extractor = AutoFeatureExtractor.from_pretrained(
+         model_args.model_name_or_path,
+         cache_dir=model_args.cache_dir,
+         use_auth_token=data_args.use_auth_token,
+     )
+
+     # adapt config
+     config.update(
+         {
+             "feat_proj_dropout": model_args.feat_proj_dropout,
+             "attention_dropout": model_args.attention_dropout,
+             "hidden_dropout": model_args.hidden_dropout,
+             "final_dropout": model_args.final_dropout,
+             "mask_time_prob": model_args.mask_time_prob,
+             "mask_time_length": model_args.mask_time_length,
+             "mask_feature_prob": model_args.mask_feature_prob,
+             "mask_feature_length": model_args.mask_feature_length,
+             "gradient_checkpointing": training_args.gradient_checkpointing,
+             "layerdrop": model_args.layerdrop,
+             "ctc_loss_reduction": model_args.ctc_loss_reduction,
+             "pad_token_id": tokenizer.pad_token_id,
+             "vocab_size": len(tokenizer),
+             "activation_dropout": model_args.activation_dropout,
+         }
+     )
+
+     # create model
+     model = AutoModelForCTC.from_pretrained(
+         model_args.model_name_or_path,
+         cache_dir=model_args.cache_dir,
+         config=config,
+         use_auth_token=data_args.use_auth_token,
+     )
+
+     # freeze encoder
+     if model_args.freeze_feature_encoder:
+         model.freeze_feature_encoder()
+
+     # 6. Now we preprocess the datasets including loading the audio, resampling and normalization
+     # Thankfully, `datasets` takes care of automatically loading and resampling the audio,
+     # so that we just need to set the correct target sampling rate and normalize the input
+     # via the `feature_extractor`
+
+     # make sure that dataset decodes audio with correct sampling rate
+     dataset_sampling_rate = (
+         next(iter(raw_datasets.values()))
+         .features[data_args.audio_column_name]
+         .sampling_rate
+     )
+     if dataset_sampling_rate != feature_extractor.sampling_rate:
+         raw_datasets = raw_datasets.cast_column(
+             data_args.audio_column_name,
+             datasets.features.Audio(sampling_rate=feature_extractor.sampling_rate),
+         )
+
+     # derive max & min input length for sample rate & max duration
+     max_input_length = (
+         data_args.max_duration_in_seconds * feature_extractor.sampling_rate
+     )
+     min_input_length = (
+         data_args.min_duration_in_seconds * feature_extractor.sampling_rate
+     )
+     audio_column_name = data_args.audio_column_name
+     num_workers = data_args.preprocessing_num_workers
+
+     # `phoneme_language` is only relevant if the model is fine-tuned on phoneme classification
+     phoneme_language = data_args.phoneme_language
+
+     # Preprocessing the datasets.
+     # We need to read the audio files as arrays and tokenize the targets.
+     def prepare_dataset(batch):
+         # load audio
+         sample = batch[audio_column_name]
+
+         inputs = feature_extractor(
+             sample["array"], sampling_rate=sample["sampling_rate"]
+         )
+         batch["input_values"] = inputs.input_values[0]
+         batch["input_length"] = len(batch["input_values"])
+
+         # encode targets
+         additional_kwargs = {}
+         if phoneme_language is not None:
+             additional_kwargs["phonemizer_lang"] = phoneme_language
+
+         batch["labels"] = tokenizer(batch["target_text"], **additional_kwargs).input_ids
+         return batch
+
+     with training_args.main_process_first(desc="dataset map preprocessing"):
+         vectorized_datasets = raw_datasets.map(
+             prepare_dataset,
+             remove_columns=next(iter(raw_datasets.values())).column_names,
+             num_proc=num_workers,
+             desc="preprocess datasets",
+         )
+
+     def is_audio_in_length_range(length):
+         return length > min_input_length and length < max_input_length
+
+     # filter data that is shorter than min_input_length
+     vectorized_datasets = vectorized_datasets.filter(
+         is_audio_in_length_range,
+         num_proc=num_workers,
+         input_columns=["input_length"],
+     )
+
+     # 7. Next, we can prepare the training.
+     # Let's use word error rate (WER) as our evaluation metric,
+     # instantiate a data collator and the trainer
+
+     # Define evaluation metrics during training, *i.e.* word error rate, character error rate
+     eval_metrics = {metric: load_metric(metric) for metric in data_args.eval_metrics}
+
+     # for large datasets it is advised to run the preprocessing on a
+     # single machine first with ``args.preprocessing_only`` since there will most likely
+     # be a timeout when running the script in distributed mode.
+     # In a second step ``args.preprocessing_only`` can then be set to `False` to load the
+     # cached dataset
+     if data_args.preprocessing_only:
+         logger.info(
+             f"Data preprocessing finished. Files cached at {vectorized_datasets.cache_files}"
+         )
+         return
+
+     def compute_metrics(pred):
+         pred_logits = pred.predictions
+         pred_ids = np.argmax(pred_logits, axis=-1)
+
+         pred.label_ids[pred.label_ids == -100] = tokenizer.pad_token_id
+
+         pred_str = tokenizer.batch_decode(pred_ids)
+         # we do not want to group tokens when computing the metrics
+         label_str = tokenizer.batch_decode(pred.label_ids, group_tokens=False)
+
+         metrics = {
+             k: v.compute(predictions=pred_str, references=label_str)
+             for k, v in eval_metrics.items()
+         }
+
+         return metrics
+
+     # Now save everything to be able to create a single processor later
+     if is_main_process(training_args.local_rank):
+         # save feature extractor, tokenizer and config
+         feature_extractor.save_pretrained(training_args.output_dir)
+         tokenizer.save_pretrained(training_args.output_dir)
+         config.save_pretrained(training_args.output_dir)
+
+     try:
+         processor = AutoProcessor.from_pretrained(training_args.output_dir)
+     except (OSError, KeyError):
+         warnings.warn(
+             "Loading a processor from a feature extractor config that does not"
+             " include a `processor_class` attribute is deprecated and will be removed in v5. Please add the following "
+             " attribute to your `preprocessor_config.json` file to suppress this warning: "
+             " `'processor_class': 'Wav2Vec2Processor'`",
+             FutureWarning,
+         )
+         processor = Wav2Vec2Processor.from_pretrained(training_args.output_dir)
+
+     # Instantiate custom data collator
+     data_collator = DataCollatorCTCWithPadding(processor=processor)
+
+     # Initialize Trainer
+     trainer = Trainer(
+         model=model,
+         data_collator=data_collator,
+         args=training_args,
+         compute_metrics=compute_metrics,
+         train_dataset=vectorized_datasets["train"] if training_args.do_train else None,
+         eval_dataset=vectorized_datasets["eval"] if training_args.do_eval else None,
+         tokenizer=feature_extractor,
+     )
+
+     # 8. Finally, we can start training
+
+     # Training
+     if training_args.do_train:
+
+         # use last checkpoint if exist
+         if last_checkpoint is not None:
+             checkpoint = last_checkpoint
+         elif os.path.isdir(model_args.model_name_or_path):
+             checkpoint = model_args.model_name_or_path
+         else:
+             checkpoint = None
+
+         train_result = trainer.train(resume_from_checkpoint=checkpoint)
+         trainer.save_model()
+
+         metrics = train_result.metrics
+         max_train_samples = (
+             data_args.max_train_samples
+             if data_args.max_train_samples is not None
+             else len(vectorized_datasets["train"])
+         )
+         metrics["train_samples"] = min(
+             max_train_samples, len(vectorized_datasets["train"])
+         )
+
+         trainer.log_metrics("train", metrics)
+         trainer.save_metrics("train", metrics)
+         trainer.save_state()
+
+     # Evaluation
+     results = {}
+     if training_args.do_eval:
+         logger.info("*** Evaluate ***")
+         metrics = trainer.evaluate()
+         max_eval_samples = (
+             data_args.max_eval_samples
+             if data_args.max_eval_samples is not None
+             else len(vectorized_datasets["eval"])
+         )
+         metrics["eval_samples"] = min(
+             max_eval_samples, len(vectorized_datasets["eval"])
+         )
+
+         trainer.log_metrics("eval", metrics)
+         trainer.save_metrics("eval", metrics)
+
+     # Write model card and (optionally) push to hub
+     config_name = (
+         data_args.dataset_config_name
+         if data_args.dataset_config_name is not None
+         else "na"
+     )
+     kwargs = {
+         "finetuned_from": model_args.model_name_or_path,
+         "tasks": "speech-recognition",
+         "tags": ["automatic-speech-recognition", data_args.dataset_name],
+         "dataset_args": f"Config: {config_name}, Training split: {data_args.train_split_name}, Eval split: {data_args.eval_split_name}",
+         "dataset": f"{data_args.dataset_name.upper()} - {config_name.upper()}",
+     }
+     if "common_voice" in data_args.dataset_name:
+         kwargs["language"] = config_name
+
+     if training_args.push_to_hub:
+         trainer.push_to_hub(**kwargs)
+     else:
+         trainer.create_model_card(**kwargs)
+
+     return results
+
+
+ if __name__ == "__main__":
+     main()
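
For reference, a minimal inference sketch against the checkpoint this script produces (illustrative only, not part of the commit; it loads from the training output directory "./" and feeds one second of silence in place of a real 16 kHz recording):

import torch
from transformers import AutoModelForCTC, AutoProcessor

# Load the processor and model from the training output directory ("./").
processor = AutoProcessor.from_pretrained("./")
model = AutoModelForCTC.from_pretrained("./")
model.eval()

# Stand-in for a real mono recording sampled at 16 kHz.
waveform = [0.0] * 16000

inputs = processor(waveform, sampling_rate=16000, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits   # (batch, time, vocab_size)

pred_ids = torch.argmax(logits, dim=-1)   # greedy CTC decoding
print(processor.batch_decode(pred_ids)[0])
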
runs/Jan31_07-15-59_job-2c68f48a-2d5d-4013-9043-3f2cb25f3ff6/1643613501.488685/events.out.tfevents.1643613501.job-2c68f48a-2d5d-4013-9043-3f2cb25f3ff6.1151936.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:28a6ab5fcdee80fd31c69dc157696d3063a1cc27099a2452f6695c51cba48628
+ size 4753
runs/Jan31_07-15-59_job-2c68f48a-2d5d-4013-9043-3f2cb25f3ff6/events.out.tfevents.1643613501.job-2c68f48a-2d5d-4013-9043-3f2cb25f3ff6.1151936.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2899aba1a5c0264df3c5c6326a68747e153f39041d06ca0ad3d1b42b63f3b01b
+ size 5833
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "[UNK]", "pad_token": "[PAD]", "additional_special_tokens": [{"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}]}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"unk_token": "[UNK]", "bos_token": "<s>", "eos_token": "</s>", "pad_token": "[PAD]", "do_lower_case": false, "word_delimiter_token": "|", "special_tokens_map_file": null, "tokenizer_file": null, "name_or_path": "./", "tokenizer_class": "Wav2Vec2CTCTokenizer"}
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4af57085f0712099c06c191a8e3123d5fbba4119a615b1c3ec5ef78e066139b2
+ size 2991
vocab.json ADDED
@@ -0,0 +1 @@
+ {"๊ฐ€": 1, "๊ฐ": 2, "๊ฐ„": 3, "๊ฐˆ": 4, "๊ฐ": 5, "๊ฐ‘": 6, "๊ฐ’": 7, "๊ฐ“": 8, "๊ฐ”": 9, "๊ฐ•": 10, "๊ฐ–": 11, "๊ฐ™": 12, "๊ฐš": 13, "๊ฐœ": 14, "๊ฐ": 15, "๊ฐ ": 16, "๊ฐค": 17, "๊ฐฏ": 18, "๊ฐฑ": 19, "๊ฑฐ": 20, "๊ฑฑ": 21, "๊ฑด": 22, "๊ฑท": 23, "๊ฑธ": 24, "๊ฒ€": 25, "๊ฒ": 26, "๊ฒƒ": 27, "๊ฒ‰": 28, "๊ฒŒ": 29, "๊ฒ": 30, "๊ฒŸ": 31, "๊ฒ ": 32, "๊ฒจ": 33, "๊ฒฉ": 34, "๊ฒช": 35, "๊ฒฌ": 36, "๊ฒฐ": 37, "๊ฒธ": 38, "๊ฒน": 39, "๊ฒผ": 40, "๊ฒฝ": 41, "๊ณ": 42, "๊ณ„": 43, "๊ณ ": 44, "๊ณก": 45, "๊ณค": 46, "๊ณง": 47, "๊ณจ": 48, "๊ณฐ": 49, "๊ณฑ": 50, "๊ณณ": 51, "๊ณต": 52, "๊ณถ": 53, "๊ณผ": 54, "๊ณฝ": 55, "๊ด€": 56, "๊ด„": 57, "๊ดŒ": 58, "๊ด‘": 59, "๊ด˜": 60, "๊ดญ": 61, "๊ดด": 62, "๊ต‰": 63, "๊ต": 64, "๊ตฌ": 65, "๊ตญ": 66, "๊ตฐ": 67, "๊ตณ": 68, "๊ตด": 69, "๊ตต": 70, "๊ตถ": 71, "๊ตฝ": 72, "๊ตฟ": 73, "๊ถ": 74, "๊ถˆ": 75, "๊ถŒ": 76, "๊ถ": 77, "๊ถค": 78, "๊ท€": 79, "๊ทœ": 80, "๊ท ": 81, "๊ทธ": 82, "๊ทน": 83, "๊ทผ": 84, "๊ธ€": 85, "๊ธˆ": 86, "๊ธ‰": 87, "๊ธ‹": 88, "๊ธ": 89, "๊ธฐ": 90, "๊ธด": 91, "๊ธธ": 92, "๊น€": 93, "๊น": 94, "๊นƒ": 95, "๊นŠ": 96, "๊นŒ": 97, "๊นŽ": 98, "๊น”": 99, "๊น": 100, "๊นก": 101, "๊นฅ": 102, "๊นจ": 103, "๊นฌ": 104, "๊บผ": 105, "๊ป": 106, "๊ป": 107, "๊ป‘": 108, "๊ป˜": 109, "๊ปด": 110, "๊ผˆ": 111, "๊ผฌ": 112, "๊ผญ": 113, "๊ผด": 114, "๊ผผ": 115, "๊ผฝ": 116, "๊ฝ": 117, "๊ฝ‚": 118, "๊ฝƒ": 119, "๊ฝ": 120, "๊ฝฅ": 121, "๊พธ": 122, "๊พผ": 123, "๊ฟ€": 124, "๊ฟ‡": 125, "๊ฟˆ": 126, "๊ฟŽ": 127, "๊ฟ”": 128, "๊ฟจ": 129, "๊ฟฐ": 130, "๋€Œ": 131, "๋€": 132, "๋„": 133, "๋…": 134, "๋ˆ": 135, "๋Š": 136, "๋Œ": 137, "๋“": 138, "๋”": 139, "๋—": 140, "๋": 141, "๋ผ": 142, "๋ฝ": 143, "๋‚€": 144, "๋‚Œ": 145, "๋‚˜": 146, "๋‚™": 147, "๋‚œ": 148, "๋‚ ": 149, "๋‚ก": 150, "๋‚จ": 151, "๋‚ฉ": 152, "๋‚ซ": 153, "๋‚ฌ": 154, "๋‚ญ": 155, "๋‚ฎ": 156, "๋‚ฏ": 157, "๋‚ณ": 158, "๋‚ด": 159, "๋‚ธ": 160, "๋‚ผ": 161, "๋ƒ„": 162, "๋ƒ…": 163, "๋ƒ‡": 164, "๋ƒˆ": 165, "๋ƒ‰": 166, "๋ƒ": 167, "๋ƒฅ": 168, "๋„ˆ": 169, "๋„‰": 170, "๋„Œ": 171, "๋„": 172, "๋„“": 173, "๋„˜": 174, "๋„›": 175, "๋„ฃ": 176, "๋„ค": 177, "๋„ฅ": 178, "๋„จ": 179, "๋„ท": 180, "๋…€": 181, "๋…": 182, "๋…„": 183, "๋…": 184, "๋…”": 185, "๋…•": 186, "๋…˜": 187, "๋…ธ": 188, "๋…น": 189, "๋…ผ": 190, "๋†€": 191, "๋†ˆ": 192, "๋†": 193, "๋†’": 194, "๋†“": 195, "๋†จ": 196, "๋‡Œ": 197, "๋‡จ": 198, "๋‡ฝ": 199, "๋ˆ„": 200, "๋ˆˆ": 201, "๋ˆŒ": 202, "๋ˆ ": 203, "๋‰œ": 204, "๋‰ด": 205, "๋Š‰": 206, "๋Š": 207, "๋Š”": 208, "๋Š˜": 209, "๋Š ": 210, "๋Šฅ": 211, "๋Šฆ": 212, "๋Šช": 213, "๋Šฌ": 214, "๋‹ˆ": 215, "๋‹‰": 216, "๋‹Œ": 217, "๋‹": 218, "๋‹˜": 219, "๋‹™": 220, "๋‹›": 221, "๋‹": 222, "๋‹ค": 223, "๋‹ฅ": 224, "๋‹ฆ": 225, "๋‹จ": 226, "๋‹ซ": 227, "๋‹ฌ": 228, "๋‹ญ": 229, "๋‹ฎ": 230, "๋‹ณ": 231, "๋‹ด": 232, "๋‹ต": 233, "๋‹ท": 234, "๋‹น": 235, "๋‹ฟ": 236, "๋Œ€": 237, "๋Œ": 238, "๋Œ": 239, "๋Œ“": 240, "๋”": 241, "๋•": 242, "๋˜": 243, "๋œ": 244, "๋Ÿ": 245, "๋ค": 246, "๋ง": 247, "๋ฉ": 248, "๋ซ": 249, "๋ฎ": 250, "๋ฐ": 251, "๋ด": 252, "๋ธ": 253, "๋Ž…": 254, "๋ŽŒ": 255, "๋„": 256, "๋…": 257, "๋ˆ": 258, "๋‹": 259, "๋Œ": 260, "๋”": 261, "๋•": 262, "๋—": 263, "๋™": 264, "๋›": 265, "๋ผ": 266, "๋": 267, "๋˜": 268, "๋œ": 269, "๋ ": 270, "๋จ": 271, "๋ฉ": 272, "๋‘": 273, "๋‘‘": 274, "๋‘”": 275, "๋‘˜": 276, "๋‘ ": 277, "๋‘ฅ": 278, "๋‘ฌ": 279, "๋’€": 280, "๋’ค": 281, "๋’ท": 282, "๋“€": 283, "๋“œ": 284, "๋“": 285, "๋“ ": 286, "๋“ฃ": 287, "๋“ค": 288, "๋“ฌ": 289, "๋“ญ": 290, "๋“ฏ": 291, "๋“ฑ": 292, "๋””": 293, "๋”˜": 294, "๋”œ": 295, "๋”ฅ": 296, "๋”จ": 297, "๋”ฉ": 298, "๋”ช": 299, "๋”ฐ": 300, "๋”ฑ": 301, "๋”ด": 302, "๋”ธ": 303, "๋•€": 304, "๋•„": 305, 
"๋•…": 306, "๋•Œ": 307, "๋•": 308, "๋• ": 309, "๋•ก": 310, "๋– ": 311, "๋–ก": 312, "๋–ค": 313, "๋–จ": 314, "๋–ด": 315, "๋–ป": 316, "๋–ผ": 317, "๋—„": 318, "๋˜": 319, "๋˜‘": 320, "๋˜ฅ": 321, "๋šœ": 322, "๋š": 323, "๋šซ": 324, "๋›ฐ": 325, "๋›ด": 326, "๋œจ": 327, "๋œฏ": 328, "๋œธ": 329, "๋œป": 330, "๋„": 331, "๋”": 332, "๋ ": 333, "๋ค": 334, "๋จ": 335, "๋ต": 336, "๋ผ": 337, "๋ฝ": 338, "๋ž€": 339, "๋ž„": 340, "๋žŒ": 341, "๋ž": 342, "๋ž": 343, "๋ž‘": 344, "๋ž˜": 345, "๋ž™": 346, "๋žœ": 347, "๋žจ": 348, "๋žซ": 349, "๋žฌ": 350, "๋žญ": 351, "๋žด": 352, "๋žต": 353, "๋Ÿ‰": 354, "๋Ÿฌ": 355, "๋Ÿญ": 356, "๋Ÿฐ": 357, "๋Ÿด": 358, "๋Ÿผ": 359, "๋Ÿฝ": 360, "๋ €": 361, "๋ ": 362, "๋ ‡": 363, "๋ ˆ": 364, "๋ ‰": 365, "๋ Œ": 366, "๋ ˜": 367, "๋ ›": 368, "๋ ค": 369, "๋ ฅ": 370, "๋ จ": 371, "๋ ฌ": 372, "๋ ด": 373, "๋ ต": 374, "๋ ท": 375, "๋ ธ": 376, "๋ น": 377, "๋ก€": 378, "๋กœ": 379, "๋ก": 380, "๋ก ": 381, "๋กค": 382, "๋กฌ": 383, "๋กญ": 384, "๋กฏ": 385, "๋กฑ": 386, "๋ขฐ": 387, "๋ฃŒ": 388, "๋ฃก": 389, "๋ฃจ": 390, "๋ฃฌ": 391, "๋ฃฐ": 392, "๋ฃธ": 393, "๋ฃน": 394, "๋ค„": 395, "๋ค˜": 396, "๋คผ": 397, "๋ฅ˜": 398, "๋ฅ™": 399, "๋ฅœ": 400, "๋ฅ ": 401, "๋ฅญ": 402, "๋ฅด": 403, "๋ฅต": 404, "๋ฅธ": 405, "๋ฅผ": 406, "๋ฆ„": 407, "๋ฆ…": 408, "๋ฆ‡": 409, "๋ฆ‰": 410, "๋ฆŽ": 411, "๋ฆฌ": 412, "๋ฆญ": 413, "๋ฆฐ": 414, "๋ฆด": 415, "๋ฆผ": 416, "๋ฆฝ": 417, "๋ฆฟ": 418, "๋ง": 419, "๋งˆ": 420, "๋ง‰": 421, "๋งŒ": 422, "๋งŽ": 423, "๋ง": 424, "๋ง": 425, "๋ง‘": 426, "๋ง˜": 427, "๋ง™": 428, "๋ง›": 429, "๋ง": 430, "๋งž": 431, "๋งก": 432, "๋งค": 433, "๋งฅ": 434, "๋งจ": 435, "๋งน": 436, "๋งบ": 437, "๋จธ": 438, "๋จน": 439, "๋จผ": 440, "๋ฉ€": 441, "๋ฉˆ": 442, "๋ฉ‹": 443, "๋ฉ": 444, "๋ฉ”": 445, "๋ฉ•": 446, "๋ฉ˜": 447, "๋ฉœ": 448, "๋ฉง": 449, "๋ฉฐ": 450, "๋ฉด": 451, "๋ฉธ": 452, "๋ช…": 453, "๋ช‡": 454, "๋ชจ": 455, "๋ชฉ": 456, "๋ชซ": 457, "๋ชฌ": 458, "๋ชฐ": 459, "๋ชธ": 460, "๋ชป": 461, "๋ชฝ": 462, "๋ฌ˜": 463, "๋ฌด": 464, "๋ฌต": 465, "๋ฌถ": 466, "๋ฌธ": 467, "๋ฌป": 468, "๋ฌผ": 469, "๋ญ„": 470, "๋ญ‡": 471, "๋ญ": 472, "๋ญ”": 473, "๋ญ˜": 474, "๋ฎค": 475, "๋ฎฌ": 476, "๋ฏ€": 477, "๋ฏˆ": 478, "๋ฏธ": 479, "๋ฏน": 480, "๋ฏผ": 481, "๋ฏฟ": 482, "๋ฐ€": 483, "๋ฐ‹": 484, "๋ฐŒ": 485, "๋ฐ": 486, "๋ฐ": 487, "๋ฐ‘": 488, "๋ฐ”": 489, "๋ฐ•": 490, "๋ฐ–": 491, "๋ฐ˜": 492, "๋ฐ›": 493, "๋ฐœ": 494, "๋ฐ": 495, "๋ฐค": 496, "๋ฐฅ": 497, "๋ฐฉ": 498, "๋ฐญ": 499, "๋ฐฐ": 500, "๋ฐฑ": 501, "๋ฐด": 502, "๋ฑ€": 503, "๋ฑƒ": 504, "๋ฑ…": 505, "๋ฒ„": 506, "๋ฒ…": 507, "๋ฒˆ": 508, "๋ฒŒ": 509, "๋ฒ”": 510, "๋ฒ•": 511, "๋ฒ—": 512, "๋ฒš": 513, "๋ฒ ": 514, "๋ฒค": 515, "๋ฒจ": 516, "๋ฒณ": 517, "๋ฒผ": 518, "๋ฒฝ": 519, "๋ณ€": 520, "๋ณ„": 521, "๋ณ": 522, "๋ณ": 523, "๋ณ‘": 524, "๋ณ•": 525, "๋ณด": 526, "๋ณต": 527, "๋ณถ": 528, "๋ณธ": 529, "๋ณผ": 530, "๋ด„": 531, "๋ด…": 532, "๋ด‡": 533, "๋ด‰": 534, "๋ด": 535, "๋ดค": 536, "๋ตˆ": 537, "๋ต™": 538, "๋ถ€": 539, "๋ถ": 540, "๋ถ„": 541, "๋ถˆ": 542, "๋ถ‰": 543, "๋ถ": 544, "๋ถ“": 545, "๋ถ•": 546, "๋ถ™": 547, "๋ท”": 548, "๋ทฐ": 549, "๋ธŒ": 550, "๋ธ": 551, "๋ธ”": 552, "๋น„": 553, "๋น…": 554, "๋นˆ": 555, "๋นŒ": 556, "๋น—": 557, "๋น™": 558, "๋นš": 559, "๋น›": 560, "๋น ": 561, "๋นจ": 562, "๋นต": 563, "๋นผ": 564, "๋บ€": 565, "๋บŒ": 566, "๋บ": 567, "๋บ‘": 568, "๋ป": 569, "๋ป‘": 570, "๋ป”": 571, "๋ป—": 572, "๋ป˜": 573, "๋ผˆ": 574, "๋ฝ€": 575, "๋ฝ‘": 576, "๋ฝ•": 577, "๋ฟŒ": 578, "๋ฟ": 579, "๋ฟœ": 580, "์˜": 581, "์œ": 582, "์ฉ": 583, "์‚": 584, "์‚ฌ": 585, "์‚ญ": 586, "์‚ฐ": 587, "์‚ด": 588, "์‚ถ": 589, "์‚ผ": 590, "์‚ฝ": 591, "์‚ฟ": 592, "์ƒ€": 593, "์ƒ": 594, "์ƒˆ": 595, "์ƒ‰": 596, "์ƒŒ": 597, "์ƒ": 598, "์ƒ˜": 599, "์ƒ": 600, "์ƒค": 601, 
"์ƒฌ": 602, "์ƒต": 603, "์ƒท": 604, "์„œ": 605, "์„": 606, "์„ž": 607, "์„ ": 608, "์„ฃ": 609, "์„ค": 610, "์„ฌ": 611, "์„ญ": 612, "์„ฏ": 613, "์„ฐ": 614, "์„ฑ": 615, "์„ธ": 616, "์„น": 617, "์„ผ": 618, "์…€": 619, "์…ˆ": 620, "์…‰": 621, "์…‹": 622, "์…”": 623, "์…˜": 624, "์…œ": 625, "์…จ": 626, "์…ฐ": 627, "์†Œ": 628, "์†": 629, "์†": 630, "์†”": 631, "์†œ": 632, "์†Ÿ": 633, "์†ก": 634, "์†ฅ": 635, "์‡„": 636, "์‡ ": 637, "์‡ค": 638, "์‡ผ": 639, "์ˆ": 640, "์ˆ˜": 641, "์ˆ™": 642, "์ˆœ": 643, "์ˆ ": 644, "์ˆจ": 645, "์ˆญ": 646, "์ˆฒ": 647, "์‰ฌ": 648, "์‰ฐ": 649, "์‰ผ": 650, "์‰ฝ": 651, "์Šˆ": 652, "์Š": 653, "์Šค": 654, "์Šจ": 655, "์Šฌ": 656, "์Šด": 657, "์Šต": 658, "์Šท": 659, "์Šน": 660, "์‹œ": 661, "์‹": 662, "์‹ ": 663, "์‹ค": 664, "์‹ซ": 665, "์‹ฌ": 666, "์‹ญ": 667, "์‹ฑ": 668, "์‹ถ": 669, "์‹ธ": 670, "์‹น": 671, "์‹ผ": 672, "์Œ€": 673, "์Œˆ": 674, "์ŒŒ": 675, "์Œ": 676, "์Œ“": 677, "์จ": 678, "์ฉ": 679, "์ฐ": 680, "์ผ": 681, "์˜": 682, "์œ": 683, "์Ÿ": 684, "์ ": 685, "์‘ค": 686, "์“ฐ": 687, "์“ด": 688, "์“ธ": 689, "์”€": 690, "์”": 691, "์”Œ": 692, "์”จ": 693, "์”ฉ": 694, "์”ฌ": 695, "์”ธ": 696, "์”ป": 697, "์•„": 698, "์•…": 699, "์•ˆ": 700, "์•‰": 701, "์•Š": 702, "์•Œ": 703, "์•“": 704, "์•”": 705, "์••": 706, "์•—": 707, "์•˜": 708, "์•™": 709, "์•ž": 710, "์• ": 711, "์•ก": 712, "์•ค": 713, "์•จ": 714, "์•ฑ": 715, "์•ต": 716, "์•ผ": 717, "์•ฝ": 718, "์–‡": 719, "์–‘": 720, "์–—": 721, "์–˜": 722, "์–ด": 723, "์–ต": 724, "์–ธ": 725, "์–น": 726, "์–ป": 727, "์–ผ": 728, "์–ฝ": 729, "์—„": 730, "์—…": 731, "์—†": 732, "์—‡": 733, "์—ˆ": 734, "์—‰": 735, "์—Ž": 736, "์—": 737, "์—‘": 738, "์—”": 739, "์—˜": 740, "์— ": 741, "์—ก": 742, "์—ฃ": 743, "์—ฌ": 744, "์—ญ": 745, "์—ฐ": 746, "์—ด": 747, "์—ท": 748, "์—ผ": 749, "์—ฝ": 750, "์—ฟ": 751, "์˜€": 752, "์˜": 753, "์˜†": 754, "์˜ˆ": 755, "์˜›": 756, "์˜ค": 757, "์˜ฅ": 758, "์˜จ": 759, "์˜ฌ": 760, "์˜ฎ": 761, "์˜ณ": 762, "์˜ด": 763, "์˜ต": 764, "์˜ท": 765, "์˜น": 766, "์™€": 767, "์™„": 768, "์™ˆ": 769, "์™”": 770, "์™•": 771, "์™œ": 772, "์™ธ": 773, "์™ผ": 774, "์š”": 775, "์š•": 776, "์šฉ": 777, "์šฐ": 778, "์šฑ": 779, "์šด": 780, "์šธ": 781, "์›€": 782, "์›": 783, "์›ƒ": 784, "์›…": 785, "์›Œ": 786, "์›": 787, "์›”": 788, "์› ": 789, "์›จ": 790, "์›ฌ": 791, "์›น": 792, "์œ„": 793, "์œˆ": 794, "์œŒ": 795, "์œ—": 796, "์œ™": 797, "์œ ": 798, "์œก": 799, "์œค": 800, "์œจ": 801, "์œต": 802, "์œผ": 803, "์€": 804, "์„": 805, "์Œ": 806, "์": 807, "์‘": 808, "์˜": 809, "์ด": 810, "์ต": 811, "์ธ": 812, "์ผ": 813, "์ฝ": 814, "์žƒ": 815, "์ž„": 816, "์ž…": 817, "์ž‡": 818, "์žˆ": 819, "์ž‰": 820, "์žŠ": 821, "์žŽ": 822, "์ž": 823, "์ž‘": 824, "์ž”": 825, "์ž–": 826, "์ž˜": 827, "์ž ": 828, "์žก": 829, "์žฃ": 830, "์žฅ": 831, "์žฆ": 832, "์žฌ": 833, "์žญ": 834, "์žฐ": 835, "์žฝ": 836, "์Ÿ": 837, "์ €": 838, "์ ": 839, "์ „": 840, "์ ˆ": 841, "์ Š": 842, "์ ‹": 843, "์ ": 844, "์ ‘": 845, "์ “": 846, "์ •": 847, "์ –": 848, "์ œ": 849, "์ ": 850, "์  ": 851, "์ ค": 852, "์ ธ": 853, "์ ผ": 854, "์กŒ": 855, "์กฐ": 856, "์กฑ": 857, "์กด": 858, "์กธ": 859, "์ข€": 860, "์ข": 861, "์ข…": 862, "์ข‹": 863, "์ขŒ": 864, "์ฃ„": 865, "์ฃ ": 866, "์ฃผ": 867, "์ฃฝ": 868, "์ค€": 869, "์ค„": 870, "์ค": 871, "์ค‘": 872, "์ค˜": 873, "์คฌ": 874, "์ฅ": 875, "์ฅ”": 876, "์ฅ˜": 877, "์ฅฌ": 878, "์ฆˆ": 879, "์ฆ‰": 880, "์ฆŒ": 881, "์ฆ": 882, "์ฆ˜": 883, "์ฆ": 884, "์ง€": 885, "์ง": 886, "์ง„": 887, "์งˆ": 888, "์งŠ": 889, "์ง": 890, "์ง‘": 891, "์ง“": 892, "์ง•": 893, "์ง–": 894, "์ง™": 895, "์งš": 896, "์งœ": 897, 
"์ง": 898, "์งง": 899, "์งฌ": 900, "์งธ": 901, "์จŒ": 902, "์ฉŒ": 903, "์ฉ": 904, "์ฉ”": 905, "์ฉœ": 906, "์ชผ": 907, "์ชฝ": 908, "์ซ„": 909, "์ซ“": 910, "์ญ‰": 911, "์ฏค": 912, "์ฐŒ": 913, "์ฐ": 914, "์ฐ”": 915, "์ฐข": 916, "์ฐง": 917, "์ฐจ": 918, "์ฐฉ": 919, "์ฐฌ": 920, "์ฐฎ": 921, "์ฐฐ": 922, "์ฐธ": 923, "์ฐป": 924, "์ฐฝ": 925, "์ฐพ": 926, "์ฑ„": 927, "์ฑ…": 928, "์ฑŒ": 929, "์ฑ”": 930, "์ฑ™": 931, "์ฑ ": 932, "์ฒ˜": 933, "์ฒ™": 934, "์ฒœ": 935, "์ฒ ": 936, "์ฒจ": 937, "์ฒฉ": 938, "์ฒซ": 939, "์ฒญ": 940, "์ฒด": 941, "์ฒธ": 942, "์ฒผ": 943, "์ณ‡": 944, "์ณ": 945, "์ณค": 946, "์ดˆ": 947, "์ด‰": 948, "์ดŒ": 949, "์ด˜": 950, "์ด›": 951, "์ด": 952, "์ดจ": 953, "์ดฌ": 954, "์ตœ": 955, "์ถ”": 956, "์ถ•": 957, "์ถ˜": 958, "์ถœ": 959, "์ถค": 960, "์ถฉ": 961, "์ถฐ": 962, "์ทจ": 963, "์ธ ": 964, "์ธก": 965, "์ธฐ": 966, "์ธต": 967, "์น˜": 968, "์น™": 969, "์นœ": 970, "์น ": 971, "์นจ": 972, "์นฉ": 973, "์นซ": 974, "์นญ": 975, "์นด": 976, "์นธ": 977, "์นผ": 978, "์บ‰": 979, "์บ": 980, "์บ”": 981, "์บ˜": 982, "์บ ": 983, "์ปค": 984, "์ปฅ": 985, "์ปจ": 986, "์ปซ": 987, "์ปด": 988, "์ปต": 989, "์ปท": 990, "์ปธ": 991, "์ผ€": 992, "์ผˆ": 993, "์ผ": 994, "์ผ‘": 995, "์ผ“": 996, "์ผœ": 997, "์ผฐ": 998, "์ฝ”": 999, "์ฝ˜": 1000, "์ฝœ": 1001, "์ฝค": 1002, "์ฝฅ": 1003, "์ฝง": 1004, "์ฝฉ": 1005, "์พŒ": 1006, "์ฟ„": 1007, "์ฟ ": 1008, "์ฟก": 1009, "์ฟจ": 1010, "์ฟผ": 1011, "ํ€ด": 1012, "ํ": 1013, "ํฌ": 1014, "ํฐ": 1015, "ํด": 1016, "ํผ": 1017, "ํ‚ค": 1018, "ํ‚ฅ": 1019, "ํ‚จ": 1020, "ํ‚ฌ": 1021, "ํ‚ท": 1022, "ํ‚น": 1023, "ํƒ€": 1024, "ํƒ": 1025, "ํƒ„": 1026, "ํƒˆ": 1027, "ํƒ": 1028, "ํƒ‘": 1029, "ํƒ“": 1030, "ํƒ•": 1031, "ํƒœ": 1032, "ํƒ": 1033, "ํƒ ": 1034, "ํƒฌ": 1035, "ํƒฑ": 1036, "ํ„ฐ": 1037, "ํ„ฑ": 1038, "ํ„ด": 1039, "ํ„ธ": 1040, "ํ…ƒ": 1041, "ํ……": 1042, "ํ…Œ": 1043, "ํ…": 1044, "ํ…": 1045, "ํ…”": 1046, "ํ…œ": 1047, "ํ…ผ": 1048, "ํ† ": 1049, "ํ†ก": 1050, "ํ†ค": 1051, "ํ†จ": 1052, "ํ†ฐ": 1053, "ํ†ต": 1054, "ํ‡ด": 1055, "ํˆฌ": 1056, "ํˆด": 1057, "ํˆผ": 1058, "ํ‰": 1059, "ํŠ€": 1060, "ํŠœ": 1061, "ํŠฌ": 1062, "ํŠธ": 1063, "ํŠน": 1064, "ํŠผ": 1065, "ํŠฟ": 1066, "ํ‹€": 1067, "ํ‹ˆ": 1068, "ํ‹ฐ": 1069, "ํ‹ฑ": 1070, "ํ‹ด": 1071, "ํ‹ธ": 1072, "ํŒ€": 1073, "ํŒ…": 1074, "ํŒŒ": 1075, "ํŒ": 1076, "ํŒŽ": 1077, "ํŒ": 1078, "ํŒ”": 1079, "ํŒœ": 1080, "ํŒก": 1081, "ํŒจ": 1082, "ํŒฉ": 1083, "ํŒฌ": 1084, "ํŒฐ": 1085, "ํŒป": 1086, "ํŒฝ": 1087, "ํผ": 1088, "ํŽ€": 1089, "ํŽ„": 1090, "ํŽŒ": 1091, "ํŽ˜": 1092, "ํŽœ": 1093, "ํŽ ": 1094, "ํŽซ": 1095, "ํŽด": 1096, "ํŽธ": 1097, "ํŽผ": 1098, "ํ„": 1099, "ํˆ": 1100, "ํ‰": 1101, "ํ": 1102, "ํฌ": 1103, "ํญ": 1104, "ํฐ": 1105, "ํด": 1106, "ํผ": 1107, "ํ‘œ": 1108, "ํ‘ธ": 1109, "ํ‘น": 1110, "ํ‘ผ": 1111, "ํ’€": 1112, "ํ’ˆ": 1113, "ํ’‹": 1114, "ํ’": 1115, "ํ“จ": 1116, "ํ“ฐ": 1117, "ํ”„": 1118, "ํ”ˆ": 1119, "ํ”Œ": 1120, "ํ””": 1121, "ํ”ผ": 1122, "ํ”ฝ": 1123, "ํ•€": 1124, "ํ•„": 1125, "ํ•": 1126, "ํ•‘": 1127, "ํ•˜": 1128, "ํ•™": 1129, "ํ•œ": 1130, "ํ• ": 1131, "ํ•จ": 1132, "ํ•ฉ": 1133, "ํ•ซ": 1134, "ํ•ญ": 1135, "ํ•ด": 1136, "ํ•ต": 1137, "ํ•ธ": 1138, "ํ–‡": 1139, "ํ–ˆ": 1140, "ํ–‰": 1141, "ํ–ฅ": 1142, "ํ—ˆ": 1143, "ํ—Œ": 1144, "ํ—": 1145, "ํ—˜": 1146, "ํ—": 1147, "ํ—ค": 1148, "ํ—จ": 1149, "ํ—ฌ": 1150, "ํ—ด": 1151, "ํ—ท": 1152, "ํ˜€": 1153, "ํ˜": 1154, "ํ˜„": 1155, "ํ˜ˆ": 1156, "ํ˜": 1157, "ํ˜‘": 1158, "ํ˜”": 1159, "ํ˜•": 1160, "ํ˜œ": 1161, "ํ˜ธ": 1162, "ํ˜น": 1163, "ํ˜ผ": 1164, "ํ™€": 1165, "ํ™ˆ": 1166, "ํ™‰": 1167, "ํ™": 1168, "ํ™”": 1169, "ํ™•": 1170, "ํ™˜": 1171, "ํ™œ": 1172, "ํ™ฉ": 1173, "ํšŒ": 1174, "ํš": 1175, "ํšก": 1176, "ํšจ": 1177, "ํ›„": 1178, 
"ํ›ˆ": 1179, "ํ›Œ": 1180, "ํ›ค": 1181, "ํ›จ": 1182, "ํ›ผ": 1183, "ํœ˜": 1184, "ํœฉ": 1185, "ํœด": 1186, "ํ‰": 1187, "ํ": 1188, "ํ‘": 1189, "ํ”": 1190, "ํ˜": 1191, "ํ™": 1192, "ํ ": 1193, "ํก": 1194, "ํฅ": 1195, "ํฉ": 1196, "ํฌ": 1197, "ํฐ": 1198, "ํžˆ": 1199, "ํžŒ": 1200, "ํž": 1201, "ํž˜": 1202, "|": 0, "[UNK]": 1203, "[PAD]": 1204}