enip2473 Axelisme commited on
Commit
7a57e45
1 Parent(s): e260cf3

Upload 43 files (#1)

Browse files

- Upload 43 files (297c44b8ce3b4be589aa860e75f4371ef3a928b6)


Co-authored-by: Axel Chiu <Axelisme@users.noreply.huggingface.co>

Files changed (43) hide show
  1. ASR-model/TransformerLM_seg_char/env.log +195 -0
  2. ASR-model/TransformerLM_seg_char/hyperparams.yaml +95 -0
  3. ASR-model/TransformerLM_seg_char/log.txt +276 -0
  4. ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00/CKPT.yaml +4 -0
  5. ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00/brain.ckpt +3 -0
  6. ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00/counter.ckpt +3 -0
  7. ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00/dataloader-TRAIN.ckpt +3 -0
  8. ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00/model.ckpt +3 -0
  9. ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00/optimizer.ckpt +3 -0
  10. ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00/scheduler.ckpt +3 -0
  11. ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+21-14-27+00/CKPT.yaml +4 -0
  12. ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+21-14-27+00/brain.ckpt +3 -0
  13. ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+21-14-27+00/counter.ckpt +3 -0
  14. ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+21-14-27+00/dataloader-TRAIN.ckpt +3 -0
  15. ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+21-14-27+00/model.ckpt +3 -0
  16. ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+21-14-27+00/optimizer.ckpt +3 -0
  17. ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+21-14-27+00/scheduler.ckpt +3 -0
  18. ASR-model/TransformerLM_seg_char/train.py +150 -0
  19. ASR-model/TransformerLM_seg_char/train_log.txt +21 -0
  20. ASR-model/asr_transformer_seg_char_ctc0.3/cer.txt +0 -0
  21. ASR-model/asr_transformer_seg_char_ctc0.3/env.log +195 -0
  22. ASR-model/asr_transformer_seg_char_ctc0.3/hyperparams.yaml +241 -0
  23. ASR-model/asr_transformer_seg_char_ctc0.3/log.txt +0 -0
  24. ASR-model/asr_transformer_seg_char_ctc0.3/results/asr_transformer_seg_char/env.log +109 -0
  25. ASR-model/asr_transformer_seg_char_ctc0.3/results/asr_transformer_seg_char/hyperparams.yaml +244 -0
  26. ASR-model/asr_transformer_seg_char_ctc0.3/results/asr_transformer_seg_char/log.txt +120 -0
  27. ASR-model/asr_transformer_seg_char_ctc0.3/results/asr_transformer_seg_char/test.py +331 -0
  28. ASR-model/asr_transformer_seg_char_ctc0.3/save/CKPT+2021-10-11+14-03-15+00/CKPT.yaml +5 -0
  29. ASR-model/asr_transformer_seg_char_ctc0.3/save/CKPT+2021-10-11+14-03-15+00/brain.ckpt +3 -0
  30. ASR-model/asr_transformer_seg_char_ctc0.3/save/CKPT+2021-10-11+14-03-15+00/counter.ckpt +3 -0
  31. ASR-model/asr_transformer_seg_char_ctc0.3/save/CKPT+2021-10-11+14-03-15+00/model.ckpt +3 -0
  32. ASR-model/asr_transformer_seg_char_ctc0.3/save/CKPT+2021-10-11+14-03-15+00/noam_scheduler.ckpt +3 -0
  33. ASR-model/asr_transformer_seg_char_ctc0.3/save/CKPT+2021-10-11+14-03-15+00/normalizer.ckpt +3 -0
  34. ASR-model/asr_transformer_seg_char_ctc0.3/test.py +181 -0
  35. ASR-model/asr_transformer_seg_char_ctc0.3/test.wav +0 -0
  36. ASR-model/asr_transformer_seg_char_ctc0.3/train.py +322 -0
  37. ASR-model/asr_transformer_seg_char_ctc0.3/train_log.txt +70 -0
  38. ASR-model/tokenizer_seg_bpe5k_char/5000_char.model +3 -0
  39. ASR-model/tokenizer_seg_bpe5k_char/5000_char.vocab +4257 -0
  40. ASR-model/tokenizer_seg_bpe5k_char/env.log +195 -0
  41. ASR-model/tokenizer_seg_bpe5k_char/hyperparams.yaml +31 -0
  42. ASR-model/tokenizer_seg_bpe5k_char/log.txt +1037 -0
  43. ASR-model/tokenizer_seg_bpe5k_char/train.py +30 -0
ASR-model/TransformerLM_seg_char/env.log ADDED
@@ -0,0 +1,195 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ SpeechBrain system description
2
+ ==============================
3
+ Python version:
4
+ 3.8.10 (default, Jun 2 2021, 10:49:15)
5
+ [GCC 9.4.0]
6
+ ==============================
7
+ Installed Python packages:
8
+ appdirs==1.4.4
9
+ argon2-cffi==20.1.0
10
+ async-generator==1.10
11
+ attrs==19.3.0
12
+ Automat==0.8.0
13
+ autopep8==1.5.7
14
+ backcall==0.2.0
15
+ backports.entry-points-selectable==1.1.0
16
+ black==19.10b0
17
+ bleach==3.3.1
18
+ blessings==1.7
19
+ blinker==1.4
20
+ bottle==0.12.19
21
+ certifi==2019.11.28
22
+ cffi==1.14.6
23
+ cfgv==3.3.0
24
+ chardet==3.0.4
25
+ Click==7.0
26
+ cloud-init==21.2
27
+ colorama==0.4.3
28
+ command-not-found==0.3
29
+ configobj==5.0.6
30
+ constantly==15.1.0
31
+ cryptography==2.8
32
+ cupshelpers==1.0
33
+ cycler==0.10.0
34
+ d2l==0.16.6
35
+ datasets==1.11.0
36
+ dbus-python==1.2.16
37
+ debugpy==1.3.0
38
+ decorator==5.0.9
39
+ defer==1.0.6
40
+ defusedxml==0.7.1
41
+ dill==0.3.4
42
+ distlib==0.3.2
43
+ distro==1.4.0
44
+ distro-info===0.23ubuntu1
45
+ entrypoints==0.3
46
+ filelock==3.0.12
47
+ flake8==3.7.9
48
+ fsspec==2021.7.0
49
+ gpustat==0.6.0
50
+ gpuview==0.4.0
51
+ httplib2==0.14.0
52
+ huggingface-hub==0.0.16
53
+ hyperlink==19.0.0
54
+ HyperPyYAML==1.0.0
55
+ identify==2.2.11
56
+ idna==2.8
57
+ importlib-metadata==1.5.0
58
+ incremental==16.10.1
59
+ ipykernel==6.0.2
60
+ ipython==7.25.0
61
+ ipython-genutils==0.2.0
62
+ ipywidgets==7.6.3
63
+ jedi==0.18.0
64
+ Jinja2==2.10.1
65
+ joblib==1.0.1
66
+ jsonpatch==1.22
67
+ jsonpointer==2.0
68
+ jsonschema==3.2.0
69
+ jupyter==1.0.0
70
+ jupyter-client==6.1.12
71
+ jupyter-console==6.4.0
72
+ jupyter-core==4.7.1
73
+ jupyterlab-pygments==0.1.2
74
+ jupyterlab-widgets==1.0.0
75
+ keyring==18.0.1
76
+ kiwisolver==1.3.1
77
+ language-selector==0.1
78
+ launchpadlib==1.10.13
79
+ lazr.restfulclient==0.14.2
80
+ lazr.uri==1.0.3
81
+ macaroonbakery==1.3.1
82
+ MarkupSafe==1.1.0
83
+ matplotlib==3.4.2
84
+ matplotlib-inline==0.1.2
85
+ mccabe==0.6.1
86
+ mistune==0.8.4
87
+ more-itertools==4.2.0
88
+ multiprocess==0.70.12.2
89
+ nbclient==0.5.3
90
+ nbconvert==6.1.0
91
+ nbformat==5.1.3
92
+ nest-asyncio==1.5.1
93
+ netifaces==0.10.4
94
+ nodeenv==1.6.0
95
+ notebook==6.4.0
96
+ numpy==1.21.2
97
+ nvidia-ml-py3==7.352.0
98
+ oauthlib==3.1.0
99
+ packaging==21.0
100
+ pandas==1.3.0
101
+ pandocfilters==1.4.3
102
+ parso==0.8.2
103
+ pathspec==0.9.0
104
+ pexpect==4.6.0
105
+ pickleshare==0.7.5
106
+ Pillow==8.3.1
107
+ platformdirs==2.0.2
108
+ pluggy==0.13.1
109
+ pre-commit==2.15.0
110
+ prometheus-client==0.11.0
111
+ prompt-toolkit==3.0.19
112
+ protobuf==3.6.1
113
+ psutil==5.8.0
114
+ ptyprocess==0.7.0
115
+ py==1.10.0
116
+ pyarrow==5.0.0
117
+ pyasn1==0.4.2
118
+ pyasn1-modules==0.2.1
119
+ pycairo==1.16.2
120
+ pycodestyle==2.5.0
121
+ pycparser==2.20
122
+ pycups==1.9.73
123
+ pyflakes==2.1.1
124
+ Pygments==2.9.0
125
+ PyGObject==3.36.0
126
+ PyHamcrest==1.9.0
127
+ PyJWT==1.7.1
128
+ pymacaroons==0.13.0
129
+ PyMySQL==1.0.2
130
+ PyNaCl==1.3.0
131
+ pyOpenSSL==19.0.0
132
+ pyparsing==2.4.7
133
+ pyRFC3339==1.1
134
+ pyrsistent==0.15.5
135
+ pyserial==3.4
136
+ pytest==5.4.1
137
+ python-apt==2.0.0+ubuntu0.20.4.5
138
+ python-dateutil==2.8.2
139
+ python-debian===0.1.36ubuntu1
140
+ pytube==10.9.3
141
+ pytz==2019.3
142
+ PyYAML==5.3.1
143
+ pyzmq==22.1.0
144
+ qtconsole==5.1.1
145
+ QtPy==1.9.0
146
+ regex==2021.7.6
147
+ requests==2.22.0
148
+ requests-unixsocket==0.2.0
149
+ ruamel.yaml==0.17.10
150
+ ruamel.yaml.clib==0.2.6
151
+ scipy==1.7.1
152
+ screen-resolution-extra==0.0.0
153
+ SecretStorage==2.3.1
154
+ Send2Trash==1.7.1
155
+ sentencepiece==0.1.96
156
+ service-identity==18.1.0
157
+ simplejson==3.16.0
158
+ six==1.14.0
159
+ sos==4.1
160
+ -e git+https://github.com/speechbrain/speechbrain.git@1d194bfc51ae20b9e38596d220cdf0f4977e69de#egg=speechbrain
161
+ ssh-import-id==5.10
162
+ supervisor==4.1.0
163
+ systemd-python==234
164
+ terminado==0.10.1
165
+ testpath==0.5.0
166
+ toml==0.10.2
167
+ torch==1.8.1
168
+ torchaudio==0.8.1
169
+ torchvision==0.10.0
170
+ tornado==6.1
171
+ tqdm==4.62.2
172
+ traitlets==5.0.5
173
+ Twisted==18.9.0
174
+ typed-ast==1.4.3
175
+ typing-extensions==3.10.0.0
176
+ ubuntu-advantage-tools==27.2
177
+ ufw==0.36
178
+ unattended-upgrades==0.1
179
+ urllib3==1.25.8
180
+ virtualenv==20.6.0
181
+ wadllib==1.3.3
182
+ wcwidth==0.2.5
183
+ webencodings==0.5.1
184
+ widgetsnbextension==3.5.1
185
+ xkit==0.0.0
186
+ xxhash==2.0.2
187
+ yamllint==1.23.0
188
+ zipp==1.0.0
189
+ zope.interface==4.7.1
190
+ ==============================
191
+ Git revision:
192
+ e3e51338
193
+ ==============================
194
+ Cuda version:
195
+ 10.2
ASR-model/TransformerLM_seg_char/hyperparams.yaml ADDED
@@ -0,0 +1,95 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Generated 2021-10-05 from:
2
+ # /mnt/md0/user_wayne/speechbrain/recipes/MATBN/LM/hparams/TransformerLM_seg_char.yaml
3
+ # yamllint disable
4
+ output_folder: results/TransformerLM_seg_char
5
+ save_folder: results/TransformerLM_seg_char/save
6
+ train_log: results/TransformerLM_seg_char/train_log.txt
7
+ num_workers: 4
8
+
9
+ data_folder: results/prepare_seg
10
+
11
+ tokenizer_file: results/tokenizer_seg_bpe5k_char/5000_char.model
12
+
13
+ tokenizer: &id001 !new:sentencepiece.SentencePieceProcessor
14
+
15
+ pretrainer: !new:speechbrain.utils.parameter_transfer.Pretrainer
16
+ collect_in: results/TransformerLM_seg_char/tokenizer
17
+ loadables:
18
+ tokenizer: *id001
19
+ paths:
20
+ tokenizer: results/tokenizer_seg_bpe5k_char/5000_char.model
21
+
22
+ train_logger: !new:speechbrain.utils.train_logger.FileTrainLogger
23
+ save_file: results/TransformerLM_seg_char/train_log.txt
24
+
25
+ # Training parameters
26
+ number_of_epochs: 20
27
+ batch_size: 64
28
+ lr: 1
29
+ accumulation_steps: 2
30
+ ckpt_interval_minutes: 15
31
+
32
+ epoch_counter: &id004 !new:speechbrain.utils.epoch_loop.EpochCounter
33
+
34
+ limit: 20
35
+
36
+ # Dataloader options
37
+ train_dataloader_opts:
38
+ batch_size: 64
39
+ num_workers: 4
40
+ shuffle: true
41
+ pin_memory: true
42
+
43
+ valid_dataloader_opts:
44
+ batch_size: 64
45
+ num_workers: 4
46
+
47
+ test_dataloader_opts:
48
+ batch_size: 64
49
+ num_workers: 4
50
+
51
+ # Model parameters
52
+ d_model: 576
53
+
54
+ # Outputs
55
+ output_neurons: 5000
56
+ blank_index: 0
57
+ bos_index: 1
58
+ eos_index: 2
59
+ unk_index: 0
60
+ pad_index: 0
61
+
62
+ model: &id002 !new:speechbrain.lobes.models.transformer.TransformerLM.TransformerLM
63
+
64
+ vocab: 5000
65
+ d_model: 576
66
+ nhead: 6
67
+ num_encoder_layers: 6
68
+ num_decoder_layers: 0
69
+ d_ffn: 1538
70
+ dropout: 0.2
71
+ activation: !name:torch.nn.GELU
72
+ normalize_before: false
73
+
74
+ modules:
75
+ model: *id002
76
+ lr_annealing: &id003 !new:speechbrain.nnet.schedulers.NoamScheduler
77
+ lr_initial: 1
78
+ n_warmup_steps: 1000
79
+ model_size: 576
80
+
81
+ checkpointer: !new:speechbrain.utils.checkpoints.Checkpointer
82
+ checkpoints_dir: results/TransformerLM_seg_char/save
83
+ recoverables:
84
+ model: *id002
85
+ scheduler: *id003
86
+ counter: *id004
87
+ log_softmax: !new:speechbrain.nnet.activations.Softmax
88
+ apply_log: true
89
+
90
+ optimizer: !name:torch.optim.Adam
91
+ lr: 0
92
+ betas: (0.9, 0.98)
93
+ eps: 0.000000001
94
+
95
+ compute_cost: !name:speechbrain.nnet.losses.nll_loss
ASR-model/TransformerLM_seg_char/log.txt ADDED
@@ -0,0 +1,276 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2021-10-05 19:59:24,051 - speechbrain.core - INFO - Beginning experiment!
2
+ 2021-10-05 19:59:24,052 - speechbrain.core - INFO - Experiment folder: results/TransformerLM_seg_char
3
+ 2021-10-05 19:59:24,902 - speechbrain.utils.superpowers - DEBUG - appdirs==1.4.4
4
+ argon2-cffi==20.1.0
5
+ async-generator==1.10
6
+ attrs==19.3.0
7
+ Automat==0.8.0
8
+ autopep8==1.5.7
9
+ backcall==0.2.0
10
+ backports.entry-points-selectable==1.1.0
11
+ black==19.10b0
12
+ bleach==3.3.1
13
+ blessings==1.7
14
+ blinker==1.4
15
+ bottle==0.12.19
16
+ certifi==2019.11.28
17
+ cffi==1.14.6
18
+ cfgv==3.3.0
19
+ chardet==3.0.4
20
+ Click==7.0
21
+ cloud-init==21.2
22
+ colorama==0.4.3
23
+ command-not-found==0.3
24
+ configobj==5.0.6
25
+ constantly==15.1.0
26
+ cryptography==2.8
27
+ cupshelpers==1.0
28
+ cycler==0.10.0
29
+ d2l==0.16.6
30
+ datasets==1.11.0
31
+ dbus-python==1.2.16
32
+ debugpy==1.3.0
33
+ decorator==5.0.9
34
+ defer==1.0.6
35
+ defusedxml==0.7.1
36
+ dill==0.3.4
37
+ distlib==0.3.2
38
+ distro==1.4.0
39
+ distro-info===0.23ubuntu1
40
+ entrypoints==0.3
41
+ filelock==3.0.12
42
+ flake8==3.7.9
43
+ fsspec==2021.7.0
44
+ gpustat==0.6.0
45
+ gpuview==0.4.0
46
+ httplib2==0.14.0
47
+ huggingface-hub==0.0.16
48
+ hyperlink==19.0.0
49
+ HyperPyYAML==1.0.0
50
+ identify==2.2.11
51
+ idna==2.8
52
+ importlib-metadata==1.5.0
53
+ incremental==16.10.1
54
+ ipykernel==6.0.2
55
+ ipython==7.25.0
56
+ ipython-genutils==0.2.0
57
+ ipywidgets==7.6.3
58
+ jedi==0.18.0
59
+ Jinja2==2.10.1
60
+ joblib==1.0.1
61
+ jsonpatch==1.22
62
+ jsonpointer==2.0
63
+ jsonschema==3.2.0
64
+ jupyter==1.0.0
65
+ jupyter-client==6.1.12
66
+ jupyter-console==6.4.0
67
+ jupyter-core==4.7.1
68
+ jupyterlab-pygments==0.1.2
69
+ jupyterlab-widgets==1.0.0
70
+ keyring==18.0.1
71
+ kiwisolver==1.3.1
72
+ language-selector==0.1
73
+ launchpadlib==1.10.13
74
+ lazr.restfulclient==0.14.2
75
+ lazr.uri==1.0.3
76
+ macaroonbakery==1.3.1
77
+ MarkupSafe==1.1.0
78
+ matplotlib==3.4.2
79
+ matplotlib-inline==0.1.2
80
+ mccabe==0.6.1
81
+ mistune==0.8.4
82
+ more-itertools==4.2.0
83
+ multiprocess==0.70.12.2
84
+ nbclient==0.5.3
85
+ nbconvert==6.1.0
86
+ nbformat==5.1.3
87
+ nest-asyncio==1.5.1
88
+ netifaces==0.10.4
89
+ nodeenv==1.6.0
90
+ notebook==6.4.0
91
+ numpy==1.21.2
92
+ nvidia-ml-py3==7.352.0
93
+ oauthlib==3.1.0
94
+ packaging==21.0
95
+ pandas==1.3.0
96
+ pandocfilters==1.4.3
97
+ parso==0.8.2
98
+ pathspec==0.9.0
99
+ pexpect==4.6.0
100
+ pickleshare==0.7.5
101
+ Pillow==8.3.1
102
+ platformdirs==2.0.2
103
+ pluggy==0.13.1
104
+ pre-commit==2.15.0
105
+ prometheus-client==0.11.0
106
+ prompt-toolkit==3.0.19
107
+ protobuf==3.6.1
108
+ psutil==5.8.0
109
+ ptyprocess==0.7.0
110
+ py==1.10.0
111
+ pyarrow==5.0.0
112
+ pyasn1==0.4.2
113
+ pyasn1-modules==0.2.1
114
+ pycairo==1.16.2
115
+ pycodestyle==2.5.0
116
+ pycparser==2.20
117
+ pycups==1.9.73
118
+ pyflakes==2.1.1
119
+ Pygments==2.9.0
120
+ PyGObject==3.36.0
121
+ PyHamcrest==1.9.0
122
+ PyJWT==1.7.1
123
+ pymacaroons==0.13.0
124
+ PyMySQL==1.0.2
125
+ PyNaCl==1.3.0
126
+ pyOpenSSL==19.0.0
127
+ pyparsing==2.4.7
128
+ pyRFC3339==1.1
129
+ pyrsistent==0.15.5
130
+ pyserial==3.4
131
+ pytest==5.4.1
132
+ python-apt==2.0.0+ubuntu0.20.4.5
133
+ python-dateutil==2.8.2
134
+ python-debian===0.1.36ubuntu1
135
+ pytube==10.9.3
136
+ pytz==2019.3
137
+ PyYAML==5.3.1
138
+ pyzmq==22.1.0
139
+ qtconsole==5.1.1
140
+ QtPy==1.9.0
141
+ regex==2021.7.6
142
+ requests==2.22.0
143
+ requests-unixsocket==0.2.0
144
+ ruamel.yaml==0.17.10
145
+ ruamel.yaml.clib==0.2.6
146
+ scipy==1.7.1
147
+ screen-resolution-extra==0.0.0
148
+ SecretStorage==2.3.1
149
+ Send2Trash==1.7.1
150
+ sentencepiece==0.1.96
151
+ service-identity==18.1.0
152
+ simplejson==3.16.0
153
+ six==1.14.0
154
+ sos==4.1
155
+ -e git+https://github.com/speechbrain/speechbrain.git@1d194bfc51ae20b9e38596d220cdf0f4977e69de#egg=speechbrain
156
+ ssh-import-id==5.10
157
+ supervisor==4.1.0
158
+ systemd-python==234
159
+ terminado==0.10.1
160
+ testpath==0.5.0
161
+ toml==0.10.2
162
+ torch==1.8.1
163
+ torchaudio==0.8.1
164
+ torchvision==0.10.0
165
+ tornado==6.1
166
+ tqdm==4.62.2
167
+ traitlets==5.0.5
168
+ Twisted==18.9.0
169
+ typed-ast==1.4.3
170
+ typing-extensions==3.10.0.0
171
+ ubuntu-advantage-tools==27.2
172
+ ufw==0.36
173
+ unattended-upgrades==0.1
174
+ urllib3==1.25.8
175
+ virtualenv==20.6.0
176
+ wadllib==1.3.3
177
+ wcwidth==0.2.5
178
+ webencodings==0.5.1
179
+ widgetsnbextension==3.5.1
180
+ xkit==0.0.0
181
+ xxhash==2.0.2
182
+ yamllint==1.23.0
183
+ zipp==1.0.0
184
+ zope.interface==4.7.1
185
+
186
+
187
+ 2021-10-05 19:59:24,907 - speechbrain.utils.superpowers - DEBUG - e3e51338
188
+
189
+
190
+ 2021-10-05 19:59:24,936 - speechbrain.utils.parameter_transfer - DEBUG - Collecting files (or symlinks) for pretraining in results/TransformerLM_seg_char/tokenizer.
191
+ 2021-10-05 19:59:24,937 - speechbrain.pretrained.fetching - INFO - Fetch 5000_char.model: Linking to local file in /mnt/md0/user_wayne/speechbrain/recipes/MATBN/results/tokenizer_seg_bpe5k_char/5000_char.model.
192
+ 2021-10-05 19:59:24,937 - speechbrain.utils.parameter_transfer - INFO - Loading pretrained files for: tokenizer
193
+ 2021-10-05 19:59:25,190 - speechbrain.core - INFO - Info: ckpt_interval_minutes arg from hparam file is used
194
+ 2021-10-05 19:59:27,052 - speechbrain.core - INFO - 24.7M trainable parameters in LM
195
+ 2021-10-05 19:59:27,053 - speechbrain.utils.checkpoints - INFO - Would load a checkpoint here, but none found yet.
196
+ 2021-10-05 19:59:27,053 - speechbrain.utils.epoch_loop - INFO - Going into epoch 1
197
+ 2021-10-05 20:03:07,476 - speechbrain.utils.train_logger - INFO - epoch: 1, lr: 4.99e-04 - train loss: 4.23 - valid loss: 3.19
198
+ 2021-10-05 20:03:07,790 - speechbrain.utils.checkpoints - INFO - Saved an end-of-epoch checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-03-07+00
199
+ 2021-10-05 20:03:07,792 - speechbrain.utils.epoch_loop - INFO - Going into epoch 2
200
+ 2021-10-05 20:06:50,852 - speechbrain.utils.train_logger - INFO - epoch: 2, lr: 1.00e-03 - train loss: 2.95 - valid loss: 2.77
201
+ 2021-10-05 20:06:51,157 - speechbrain.utils.checkpoints - INFO - Saved an end-of-epoch checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-06-50+00
202
+ 2021-10-05 20:06:51,186 - speechbrain.utils.checkpoints - INFO - Deleted checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-03-07+00
203
+ 2021-10-05 20:06:51,186 - speechbrain.utils.epoch_loop - INFO - Going into epoch 3
204
+ 2021-10-05 20:10:34,626 - speechbrain.utils.train_logger - INFO - epoch: 3, lr: 1.23e-03 - train loss: 2.73 - valid loss: 2.64
205
+ 2021-10-05 20:10:34,936 - speechbrain.utils.checkpoints - INFO - Saved an end-of-epoch checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-10-34+00
206
+ 2021-10-05 20:10:34,967 - speechbrain.utils.checkpoints - INFO - Deleted checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-06-50+00
207
+ 2021-10-05 20:10:34,967 - speechbrain.utils.epoch_loop - INFO - Going into epoch 4
208
+ 2021-10-05 20:14:18,480 - speechbrain.utils.train_logger - INFO - epoch: 4, lr: 1.07e-03 - train loss: 2.60 - valid loss: 2.54
209
+ 2021-10-05 20:14:18,787 - speechbrain.utils.checkpoints - INFO - Saved an end-of-epoch checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-14-18+00
210
+ 2021-10-05 20:14:18,820 - speechbrain.utils.checkpoints - INFO - Deleted checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-10-34+00
211
+ 2021-10-05 20:14:18,821 - speechbrain.utils.epoch_loop - INFO - Going into epoch 5
212
+ 2021-10-05 20:18:02,905 - speechbrain.utils.train_logger - INFO - epoch: 5, lr: 9.56e-04 - train loss: 2.49 - valid loss: 2.48
213
+ 2021-10-05 20:18:03,213 - speechbrain.utils.checkpoints - INFO - Saved an end-of-epoch checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-18-02+00
214
+ 2021-10-05 20:18:03,247 - speechbrain.utils.checkpoints - INFO - Deleted checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-14-18+00
215
+ 2021-10-05 20:18:03,247 - speechbrain.utils.epoch_loop - INFO - Going into epoch 6
216
+ 2021-10-05 20:21:47,703 - speechbrain.utils.train_logger - INFO - epoch: 6, lr: 8.73e-04 - train loss: 2.41 - valid loss: 2.42
217
+ 2021-10-05 20:21:48,011 - speechbrain.utils.checkpoints - INFO - Saved an end-of-epoch checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-21-47+00
218
+ 2021-10-05 20:21:48,048 - speechbrain.utils.checkpoints - INFO - Deleted checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-18-02+00
219
+ 2021-10-05 20:21:48,048 - speechbrain.utils.epoch_loop - INFO - Going into epoch 7
220
+ 2021-10-05 20:25:32,303 - speechbrain.utils.train_logger - INFO - epoch: 7, lr: 8.08e-04 - train loss: 2.35 - valid loss: 2.38
221
+ 2021-10-05 20:25:32,612 - speechbrain.utils.checkpoints - INFO - Saved an end-of-epoch checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-25-32+00
222
+ 2021-10-05 20:25:32,650 - speechbrain.utils.checkpoints - INFO - Deleted checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-21-47+00
223
+ 2021-10-05 20:25:32,651 - speechbrain.utils.epoch_loop - INFO - Going into epoch 8
224
+ 2021-10-05 20:29:17,400 - speechbrain.utils.train_logger - INFO - epoch: 8, lr: 7.56e-04 - train loss: 2.29 - valid loss: 2.35
225
+ 2021-10-05 20:29:17,706 - speechbrain.utils.checkpoints - INFO - Saved an end-of-epoch checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-29-17+00
226
+ 2021-10-05 20:29:17,745 - speechbrain.utils.checkpoints - INFO - Deleted checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-25-32+00
227
+ 2021-10-05 20:29:17,746 - speechbrain.utils.epoch_loop - INFO - Going into epoch 9
228
+ 2021-10-05 20:33:02,633 - speechbrain.utils.train_logger - INFO - epoch: 9, lr: 7.13e-04 - train loss: 2.24 - valid loss: 2.33
229
+ 2021-10-05 20:33:02,943 - speechbrain.utils.checkpoints - INFO - Saved an end-of-epoch checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-33-02+00
230
+ 2021-10-05 20:33:02,985 - speechbrain.utils.checkpoints - INFO - Deleted checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-29-17+00
231
+ 2021-10-05 20:33:02,985 - speechbrain.utils.epoch_loop - INFO - Going into epoch 10
232
+ 2021-10-05 20:36:47,808 - speechbrain.utils.train_logger - INFO - epoch: 10, lr: 6.76e-04 - train loss: 2.19 - valid loss: 2.32
233
+ 2021-10-05 20:36:48,118 - speechbrain.utils.checkpoints - INFO - Saved an end-of-epoch checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-36-47+00
234
+ 2021-10-05 20:36:48,161 - speechbrain.utils.checkpoints - INFO - Deleted checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-33-02+00
235
+ 2021-10-05 20:36:48,161 - speechbrain.utils.epoch_loop - INFO - Going into epoch 11
236
+ 2021-10-05 20:40:33,763 - speechbrain.utils.train_logger - INFO - epoch: 11, lr: 6.45e-04 - train loss: 2.15 - valid loss: 2.30
237
+ 2021-10-05 20:40:34,072 - speechbrain.utils.checkpoints - INFO - Saved an end-of-epoch checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-40-33+00
238
+ 2021-10-05 20:40:34,117 - speechbrain.utils.checkpoints - INFO - Deleted checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-36-47+00
239
+ 2021-10-05 20:40:34,117 - speechbrain.utils.epoch_loop - INFO - Going into epoch 12
240
+ 2021-10-05 20:44:19,873 - speechbrain.utils.train_logger - INFO - epoch: 12, lr: 6.17e-04 - train loss: 2.11 - valid loss: 2.30
241
+ 2021-10-05 20:44:20,181 - speechbrain.utils.checkpoints - INFO - Saved an end-of-epoch checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-44-19+00
242
+ 2021-10-05 20:44:20,228 - speechbrain.utils.checkpoints - INFO - Deleted checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-40-33+00
243
+ 2021-10-05 20:44:20,228 - speechbrain.utils.epoch_loop - INFO - Going into epoch 13
244
+ 2021-10-05 20:48:05,429 - speechbrain.utils.train_logger - INFO - epoch: 13, lr: 5.93e-04 - train loss: 2.08 - valid loss: 2.29
245
+ 2021-10-05 20:48:05,739 - speechbrain.utils.checkpoints - INFO - Saved an end-of-epoch checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-48-05+00
246
+ 2021-10-05 20:48:05,788 - speechbrain.utils.checkpoints - INFO - Deleted checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-44-19+00
247
+ 2021-10-05 20:48:05,789 - speechbrain.utils.epoch_loop - INFO - Going into epoch 14
248
+ 2021-10-05 20:51:51,235 - speechbrain.utils.train_logger - INFO - epoch: 14, lr: 5.71e-04 - train loss: 2.04 - valid loss: 2.29
249
+ 2021-10-05 20:51:51,545 - speechbrain.utils.checkpoints - INFO - Saved an end-of-epoch checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-51-51+00
250
+ 2021-10-05 20:51:51,596 - speechbrain.utils.checkpoints - INFO - Deleted checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-48-05+00
251
+ 2021-10-05 20:51:51,596 - speechbrain.utils.epoch_loop - INFO - Going into epoch 15
252
+ 2021-10-05 20:55:37,264 - speechbrain.utils.train_logger - INFO - epoch: 15, lr: 5.52e-04 - train loss: 2.01 - valid loss: 2.28
253
+ 2021-10-05 20:55:37,574 - speechbrain.utils.checkpoints - INFO - Saved an end-of-epoch checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00
254
+ 2021-10-05 20:55:37,626 - speechbrain.utils.checkpoints - INFO - Deleted checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-51-51+00
255
+ 2021-10-05 20:55:37,627 - speechbrain.utils.epoch_loop - INFO - Going into epoch 16
256
+ 2021-10-05 20:59:22,956 - speechbrain.utils.train_logger - INFO - epoch: 16, lr: 5.34e-04 - train loss: 1.98 - valid loss: 2.28
257
+ 2021-10-05 20:59:23,263 - speechbrain.utils.checkpoints - INFO - Saved an end-of-epoch checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-59-22+00
258
+ 2021-10-05 20:59:23,293 - speechbrain.utils.epoch_loop - INFO - Going into epoch 17
259
+ 2021-10-05 21:03:08,924 - speechbrain.utils.train_logger - INFO - epoch: 17, lr: 5.18e-04 - train loss: 1.95 - valid loss: 2.29
260
+ 2021-10-05 21:03:09,230 - speechbrain.utils.checkpoints - INFO - Saved an end-of-epoch checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+21-03-08+00
261
+ 2021-10-05 21:03:09,301 - speechbrain.utils.checkpoints - INFO - Deleted checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-59-22+00
262
+ 2021-10-05 21:03:09,301 - speechbrain.utils.epoch_loop - INFO - Going into epoch 18
263
+ 2021-10-05 21:06:55,028 - speechbrain.utils.train_logger - INFO - epoch: 18, lr: 5.04e-04 - train loss: 1.92 - valid loss: 2.29
264
+ 2021-10-05 21:06:55,335 - speechbrain.utils.checkpoints - INFO - Saved an end-of-epoch checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+21-06-55+00
265
+ 2021-10-05 21:06:55,408 - speechbrain.utils.checkpoints - INFO - Deleted checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+21-03-08+00
266
+ 2021-10-05 21:06:55,409 - speechbrain.utils.epoch_loop - INFO - Going into epoch 19
267
+ 2021-10-05 21:10:40,930 - speechbrain.utils.train_logger - INFO - epoch: 19, lr: 4.90e-04 - train loss: 1.90 - valid loss: 2.30
268
+ 2021-10-05 21:10:41,237 - speechbrain.utils.checkpoints - INFO - Saved an end-of-epoch checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+21-10-40+00
269
+ 2021-10-05 21:10:41,312 - speechbrain.utils.checkpoints - INFO - Deleted checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+21-06-55+00
270
+ 2021-10-05 21:10:41,312 - speechbrain.utils.epoch_loop - INFO - Going into epoch 20
271
+ 2021-10-05 21:14:27,278 - speechbrain.utils.train_logger - INFO - epoch: 20, lr: 4.78e-04 - train loss: 1.87 - valid loss: 2.30
272
+ 2021-10-05 21:14:27,586 - speechbrain.utils.checkpoints - INFO - Saved an end-of-epoch checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+21-14-27+00
273
+ 2021-10-05 21:14:27,663 - speechbrain.utils.checkpoints - INFO - Deleted checkpoint in results/TransformerLM_seg_char/save/CKPT+2021-10-05+21-10-40+00
274
+ 2021-10-05 21:14:27,664 - speechbrain.utils.checkpoints - INFO - Loading a checkpoint from results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00
275
+ 2021-10-05 21:14:27,706 - root - DEBUG - SaveableDataLoader was requested to load a checkpoint, but the DataLoader has already been iterated. The DataLoader file will be ignored. This is normal in evaluation, when a checkpoint is loaded just to retrieve the best model.
276
+ 2021-10-05 21:14:31,760 - speechbrain.utils.train_logger - INFO - Epoch loaded: 15 - test loss: 2.68
ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00/CKPT.yaml ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ # yamllint disable
2
+ end-of-epoch: true
3
+ loss: 2.282208064707314
4
+ unixtime: 1633438537.264693
ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00/brain.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d9e24193f36931b7f57932532efbdcf64971f42732383ba6808825f77db258f6
3
+ size 28
ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00/counter.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e629fa6598d732768f7c726b4b621285f9c3b85303900aa912017db7617d8bdb
3
+ size 2
ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00/dataloader-TRAIN.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c78961d3d782d8a85d9344eedae027f43ce6b9fd35c8f355861a39e0d0ddecc5
3
+ size 3
ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00/model.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7a86f5034ce4797ae0fd5c91b5aa1d9cf370c4b00e12cbb30ec43f484b9aed89
3
+ size 104726057
ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00/optimizer.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1cfecb22673760d25f6bfc67321a0ff1e9f46cbfdab6e6671fea25a6397e7d3e
3
+ size 197914775
ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00/scheduler.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:31a2c1b304e48d83777679d5d8dae37370952827c011eceb6b0e55ed8f848574
3
+ size 431
ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+21-14-27+00/CKPT.yaml ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ # yamllint disable
2
+ end-of-epoch: true
3
+ loss: 2.3031107332648304
4
+ unixtime: 1633439667.279289
ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+21-14-27+00/brain.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d9e24193f36931b7f57932532efbdcf64971f42732383ba6808825f77db258f6
3
+ size 28
ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+21-14-27+00/counter.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f5ca38f748a1d6eaf726b8a42fb575c3c71f1864a8143301782de13da2d9202b
3
+ size 2
ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+21-14-27+00/dataloader-TRAIN.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c78961d3d782d8a85d9344eedae027f43ce6b9fd35c8f355861a39e0d0ddecc5
3
+ size 3
ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+21-14-27+00/model.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5610f4428ab9e40a6a69f2a009d6fbd1b2ac25470b6a47f139172ca264c6ea1a
3
+ size 104726057
ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+21-14-27+00/optimizer.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1e61f107260662bbb7b6b74433922eb0ad516163c7a478ebe6466037eed8c489
3
+ size 197914775
ASR-model/TransformerLM_seg_char/save/CKPT+2021-10-05+21-14-27+00/scheduler.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:60e6d1257c2e1fc1ea9c16b54b3f5d6690602634979718bde831b1084b107c84
3
+ size 431
ASR-model/TransformerLM_seg_char/train.py ADDED
@@ -0,0 +1,150 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sys
2
+
3
+ import torch
4
+ import speechbrain as sb
5
+ from speechbrain.dataio import dataset
6
+ from speechbrain.utils.distributed import run_on_main
7
+ from hyperpyyaml import load_hyperpyyaml
8
+
9
+
10
class LM(sb.core.Brain):
    """Trains a Transformer language model with SpeechBrain.

    The model reads BOS-prefixed token sequences and is optimized against
    the corresponding EOS-suffixed targets with the cost function declared
    in the hyperparameters (``compute_cost``).
    """

    # Schedulers that must be advanced once per optimizer step rather than
    # once per epoch (loss-driven schedulers are handled in on_stage_end).
    _STEP_WISE_SCHEDULERS = (
        sb.nnet.schedulers.NoamScheduler,
        sb.nnet.schedulers.CyclicCosineScheduler,
    )

    def compute_forward(self, batch, stage):
        """Run the LM forward pass.

        Arguments
        ---------
        batch : PaddedBatch
            Batch providing ``tokens_bos`` (BOS-prefixed token ids).
        stage : sb.Stage
            Current stage (TRAIN / VALID / TEST); unused here.

        Returns
        -------
        torch.Tensor
            Log-softmax predictions over the vocabulary.
        """
        batch = batch.to(self.device)
        tokens_bos, _ = batch.tokens_bos
        logits = self.hparams.model(tokens_bos)
        pred = self.hparams.log_softmax(logits)
        return pred

    def compute_objectives(self, predictions, batch, stage):
        """Compute the LM loss against the EOS-suffixed targets."""
        batch = batch.to(self.device)
        tokens_eos, tokens_len = batch.tokens_eos
        loss = self.hparams.compute_cost(
            predictions, tokens_eos, length=tokens_len
        )
        return loss

    def fit_batch(self, batch):
        """Train on one batch with gradient accumulation.

        The loss is scaled by ``accumulation_steps`` so the accumulated
        gradient matches that of a proportionally larger batch.  Gradient
        checking, the optimizer step, and step-wise LR scheduling happen
        only on accumulation boundaries.
        """
        predictions = self.compute_forward(batch, sb.Stage.TRAIN)
        loss = self.compute_objectives(predictions, batch, sb.Stage.TRAIN)

        (loss / self.hparams.accumulation_steps).backward()

        if self.step % self.hparams.accumulation_steps == 0:
            self.check_gradients(loss)

            self.optimizer.step()
            self.optimizer.zero_grad()

            # idiomatic tuple form instead of chained isinstance calls
            if isinstance(self.hparams.lr_annealing, self._STEP_WISE_SCHEDULERS):
                self.hparams.lr_annealing(self.optimizer)

        return loss

    def on_stage_end(self, stage, stage_loss, epoch):
        """Log statistics and checkpoint at the end of each stage.

        - TRAIN: stash stats for the upcoming VALID log line.
        - VALID (main process only): update loss-driven schedulers, log the
          epoch summary, and keep only the best-loss checkpoint.
        - TEST (main process only): log the final test loss.
        """
        stage_stats = {"loss": stage_loss}
        if stage == sb.Stage.TRAIN:
            self.train_stats = stage_stats

        if stage == sb.Stage.VALID and sb.utils.distributed.if_main_process():
            if not isinstance(
                self.hparams.lr_annealing, self._STEP_WISE_SCHEDULERS
            ):
                # Loss-driven scheduler: anneal on the validation loss.
                old_lr, new_lr = self.hparams.lr_annealing(stage_loss)
                sb.nnet.schedulers.update_learning_rate(self.optimizer, new_lr)
            else:
                # Step-wise scheduler already advanced in fit_batch;
                # just report its current LR.
                old_lr = self.hparams.lr_annealing.current_lr

            self.hparams.train_logger.log_stats(
                stats_meta={"epoch": epoch, "lr": old_lr},
                train_stats=self.train_stats,
                valid_stats=stage_stats,
            )
            self.checkpointer.save_and_keep_only(
                meta=stage_stats, min_keys=["loss"],
            )

        if stage == sb.Stage.TEST and sb.utils.distributed.if_main_process():
            self.hparams.train_logger.log_stats(
                stats_meta={"Epoch loaded": self.hparams.epoch_counter.current},
                test_stats=stage_stats,
            )
82
+
83
+
84
def dataio_prepare(hparams):
    """Build the train/dev/test datasets with the LM token pipeline.

    Arguments
    ---------
    hparams : dict
        Loaded hyperparameters; must provide ``tokenizer`` (SentencePiece),
        ``bos_index``, ``eos_index`` and ``data_folder`` (containing
        ``train.json`` / ``dev.json`` / ``test.json``).

    Returns
    -------
    dict
        Mapping of split name ("train", "dev", "test") to a
        ``DynamicItemDataset`` yielding "transcription", "tokens_bos"
        and "tokens_eos".
    """

    @sb.utils.data_pipeline.takes("transcription")
    @sb.utils.data_pipeline.provides(
        "transcription", "tokens_bos", "tokens_eos"
    )
    def transcription_pipeline(transcription):
        # Yield the raw text, then the BOS-prefixed input ids, then the
        # EOS-suffixed target ids, matching the "provides" order above.
        yield transcription
        tokens_list = hparams["tokenizer"].encode_as_ids(transcription)
        tokens_bos = torch.LongTensor([hparams["bos_index"]] + tokens_list)
        yield tokens_bos
        tokens_eos = torch.LongTensor(tokens_list + [hparams["eos_index"]])
        yield tokens_eos

    data_folder = hparams["data_folder"]
    datasets = {}
    for split in ["train", "dev", "test"]:
        json_path = f"{data_folder}/{split}.json"
        datasets[split] = dataset.DynamicItemDataset.from_json(
            json_path=json_path,
            replacements={"data_root": data_folder},
            dynamic_items=[transcription_pipeline],
            output_keys=["transcription", "tokens_bos", "tokens_eos"],
        )

    return datasets
109
+
110
+
111
if __name__ == "__main__":
    # Parse CLI arguments (hparams file path, run options, YAML overrides)
    # and resolve the hyperparameter tree.
    hparams_path, run_opts, overrides = sb.parse_arguments(sys.argv[1:])
    with open(hparams_path) as fin:
        hparams = load_hyperpyyaml(fin, overrides)

    # Initialize distributed training (no-op for single-process runs).
    sb.utils.distributed.ddp_init_group(run_opts)

    # Create the output folder and persist the resolved hyperparameters.
    sb.create_experiment_directory(
        experiment_directory=hparams["output_folder"],
        hyperparams_to_save=hparams_path,
        overrides=overrides,
    )

    # Fetch pretrained assets (tokenizer, etc.) on the main process,
    # then load them on every process.
    run_on_main(hparams["pretrainer"].collect_files)
    hparams["pretrainer"].load_collected(device=run_opts["device"])

    datasets = dataio_prepare(hparams)

    brain = LM(
        modules=hparams["modules"],
        opt_class=hparams["optimizer"],
        hparams=hparams,
        run_opts=run_opts,
        checkpointer=hparams["checkpointer"],
    )

    # Training loop over the configured number of epochs.
    brain.fit(
        brain.hparams.epoch_counter,
        datasets["train"],
        datasets["dev"],
        train_loader_kwargs=hparams["train_dataloader_opts"],
        valid_loader_kwargs=hparams["valid_dataloader_opts"],
    )

    # Final evaluation with the best (lowest-loss) checkpoint.
    brain.evaluate(
        datasets["test"],
        min_key="loss",
        test_loader_kwargs=hparams["test_dataloader_opts"],
    )
ASR-model/TransformerLM_seg_char/train_log.txt ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ epoch: 1, lr: 4.99e-04 - train loss: 4.23 - valid loss: 3.19
2
+ epoch: 2, lr: 1.00e-03 - train loss: 2.95 - valid loss: 2.77
3
+ epoch: 3, lr: 1.23e-03 - train loss: 2.73 - valid loss: 2.64
4
+ epoch: 4, lr: 1.07e-03 - train loss: 2.60 - valid loss: 2.54
5
+ epoch: 5, lr: 9.56e-04 - train loss: 2.49 - valid loss: 2.48
6
+ epoch: 6, lr: 8.73e-04 - train loss: 2.41 - valid loss: 2.42
7
+ epoch: 7, lr: 8.08e-04 - train loss: 2.35 - valid loss: 2.38
8
+ epoch: 8, lr: 7.56e-04 - train loss: 2.29 - valid loss: 2.35
9
+ epoch: 9, lr: 7.13e-04 - train loss: 2.24 - valid loss: 2.33
10
+ epoch: 10, lr: 6.76e-04 - train loss: 2.19 - valid loss: 2.32
11
+ epoch: 11, lr: 6.45e-04 - train loss: 2.15 - valid loss: 2.30
12
+ epoch: 12, lr: 6.17e-04 - train loss: 2.11 - valid loss: 2.30
13
+ epoch: 13, lr: 5.93e-04 - train loss: 2.08 - valid loss: 2.29
14
+ epoch: 14, lr: 5.71e-04 - train loss: 2.04 - valid loss: 2.29
15
+ epoch: 15, lr: 5.52e-04 - train loss: 2.01 - valid loss: 2.28
16
+ epoch: 16, lr: 5.34e-04 - train loss: 1.98 - valid loss: 2.28
17
+ epoch: 17, lr: 5.18e-04 - train loss: 1.95 - valid loss: 2.29
18
+ epoch: 18, lr: 5.04e-04 - train loss: 1.92 - valid loss: 2.29
19
+ epoch: 19, lr: 4.90e-04 - train loss: 1.90 - valid loss: 2.30
20
+ epoch: 20, lr: 4.78e-04 - train loss: 1.87 - valid loss: 2.30
21
+ Epoch loaded: 15 - test loss: 2.68
ASR-model/asr_transformer_seg_char_ctc0.3/cer.txt ADDED
The diff for this file is too large to render. See raw diff
 
ASR-model/asr_transformer_seg_char_ctc0.3/env.log ADDED
@@ -0,0 +1,195 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ SpeechBrain system description
2
+ ==============================
3
+ Python version:
4
+ 3.8.10 (default, Sep 28 2021, 16:10:42)
5
+ [GCC 9.3.0]
6
+ ==============================
7
+ Installed Python packages:
8
+ appdirs==1.4.4
9
+ argon2-cffi==20.1.0
10
+ async-generator==1.10
11
+ attrs==19.3.0
12
+ Automat==0.8.0
13
+ autopep8==1.5.7
14
+ backcall==0.2.0
15
+ backports.entry-points-selectable==1.1.0
16
+ black==19.10b0
17
+ bleach==3.3.1
18
+ blessings==1.7
19
+ blinker==1.4
20
+ bottle==0.12.19
21
+ certifi==2019.11.28
22
+ cffi==1.14.6
23
+ cfgv==3.3.0
24
+ chardet==3.0.4
25
+ Click==7.0
26
+ cloud-init==21.2
27
+ colorama==0.4.3
28
+ command-not-found==0.3
29
+ configobj==5.0.6
30
+ constantly==15.1.0
31
+ cryptography==2.8
32
+ cupshelpers==1.0
33
+ cycler==0.10.0
34
+ d2l==0.16.6
35
+ datasets==1.11.0
36
+ dbus-python==1.2.16
37
+ debugpy==1.3.0
38
+ decorator==5.0.9
39
+ defer==1.0.6
40
+ defusedxml==0.7.1
41
+ dill==0.3.4
42
+ distlib==0.3.2
43
+ distro==1.4.0
44
+ distro-info===0.23ubuntu1
45
+ entrypoints==0.3
46
+ filelock==3.0.12
47
+ flake8==3.7.9
48
+ fsspec==2021.7.0
49
+ gpustat==0.6.0
50
+ gpuview==0.4.0
51
+ httplib2==0.14.0
52
+ huggingface-hub==0.0.16
53
+ hyperlink==19.0.0
54
+ HyperPyYAML==1.0.0
55
+ identify==2.2.11
56
+ idna==2.8
57
+ importlib-metadata==1.5.0
58
+ incremental==16.10.1
59
+ ipykernel==6.0.2
60
+ ipython==7.25.0
61
+ ipython-genutils==0.2.0
62
+ ipywidgets==7.6.3
63
+ jedi==0.18.0
64
+ Jinja2==2.10.1
65
+ joblib==1.0.1
66
+ jsonpatch==1.22
67
+ jsonpointer==2.0
68
+ jsonschema==3.2.0
69
+ jupyter==1.0.0
70
+ jupyter-client==6.1.12
71
+ jupyter-console==6.4.0
72
+ jupyter-core==4.7.1
73
+ jupyterlab-pygments==0.1.2
74
+ jupyterlab-widgets==1.0.0
75
+ keyring==18.0.1
76
+ kiwisolver==1.3.1
77
+ language-selector==0.1
78
+ launchpadlib==1.10.13
79
+ lazr.restfulclient==0.14.2
80
+ lazr.uri==1.0.3
81
+ macaroonbakery==1.3.1
82
+ MarkupSafe==1.1.0
83
+ matplotlib==3.4.2
84
+ matplotlib-inline==0.1.2
85
+ mccabe==0.6.1
86
+ mistune==0.8.4
87
+ more-itertools==4.2.0
88
+ multiprocess==0.70.12.2
89
+ nbclient==0.5.3
90
+ nbconvert==6.1.0
91
+ nbformat==5.1.3
92
+ nest-asyncio==1.5.1
93
+ netifaces==0.10.4
94
+ nodeenv==1.6.0
95
+ notebook==6.4.0
96
+ numpy==1.21.2
97
+ nvidia-ml-py3==7.352.0
98
+ oauthlib==3.1.0
99
+ packaging==21.0
100
+ pandas==1.3.0
101
+ pandocfilters==1.4.3
102
+ parso==0.8.2
103
+ pathspec==0.9.0
104
+ pexpect==4.6.0
105
+ pickleshare==0.7.5
106
+ Pillow==8.3.1
107
+ platformdirs==2.0.2
108
+ pluggy==0.13.1
109
+ pre-commit==2.15.0
110
+ prometheus-client==0.11.0
111
+ prompt-toolkit==3.0.19
112
+ protobuf==3.6.1
113
+ psutil==5.8.0
114
+ ptyprocess==0.7.0
115
+ py==1.10.0
116
+ pyarrow==5.0.0
117
+ pyasn1==0.4.2
118
+ pyasn1-modules==0.2.1
119
+ pycairo==1.16.2
120
+ pycodestyle==2.5.0
121
+ pycparser==2.20
122
+ pycups==1.9.73
123
+ pyflakes==2.1.1
124
+ Pygments==2.9.0
125
+ PyGObject==3.36.0
126
+ PyHamcrest==1.9.0
127
+ PyJWT==1.7.1
128
+ pymacaroons==0.13.0
129
+ PyMySQL==1.0.2
130
+ PyNaCl==1.3.0
131
+ pyOpenSSL==19.0.0
132
+ pyparsing==2.4.7
133
+ pyRFC3339==1.1
134
+ pyrsistent==0.15.5
135
+ pyserial==3.4
136
+ pytest==5.4.1
137
+ python-apt==2.0.0+ubuntu0.20.4.5
138
+ python-dateutil==2.8.2
139
+ python-debian===0.1.36ubuntu1
140
+ pytube==10.9.3
141
+ pytz==2019.3
142
+ PyYAML==5.3.1
143
+ pyzmq==22.1.0
144
+ qtconsole==5.1.1
145
+ QtPy==1.9.0
146
+ regex==2021.7.6
147
+ requests==2.22.0
148
+ requests-unixsocket==0.2.0
149
+ ruamel.yaml==0.17.10
150
+ ruamel.yaml.clib==0.2.6
151
+ scipy==1.7.1
152
+ screen-resolution-extra==0.0.0
153
+ SecretStorage==2.3.1
154
+ Send2Trash==1.7.1
155
+ sentencepiece==0.1.96
156
+ service-identity==18.1.0
157
+ simplejson==3.16.0
158
+ six==1.14.0
159
+ sos==4.1
160
+ -e git+https://github.com/speechbrain/speechbrain.git@1d194bfc51ae20b9e38596d220cdf0f4977e69de#egg=speechbrain
161
+ ssh-import-id==5.10
162
+ supervisor==4.1.0
163
+ systemd-python==234
164
+ terminado==0.10.1
165
+ testpath==0.5.0
166
+ toml==0.10.2
167
+ torch==1.8.1
168
+ torchaudio==0.8.1
169
+ torchvision==0.10.0
170
+ tornado==6.1
171
+ tqdm==4.62.2
172
+ traitlets==5.0.5
173
+ Twisted==18.9.0
174
+ typed-ast==1.4.3
175
+ typing-extensions==3.10.0.0
176
+ ubuntu-advantage-tools==27.2
177
+ ufw==0.36
178
+ unattended-upgrades==0.1
179
+ urllib3==1.25.8
180
+ virtualenv==20.6.0
181
+ wadllib==1.3.3
182
+ wcwidth==0.2.5
183
+ webencodings==0.5.1
184
+ widgetsnbextension==3.5.1
185
+ xkit==0.0.0
186
+ xxhash==2.0.2
187
+ yamllint==1.23.0
188
+ zipp==1.0.0
189
+ zope.interface==4.7.1
190
+ ==============================
191
+ Git revision:
192
+ e3e51338
193
+ ==============================
194
+ Cuda version:
195
+ 10.2
ASR-model/asr_transformer_seg_char_ctc0.3/hyperparams.yaml ADDED
@@ -0,0 +1,241 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Generated 2021-10-11 from:
2
+ # /mnt/md0/user_wayne/speechbrain/recipes/MATBN/ASR/hparams/transformer_seg_LM_char_decode.yaml
3
+ # yamllint disable
4
+ output_folder: results/asr_transformer_seg_char
5
+ cer_file: results/asr_transformer_seg_char/cer.txt
6
+ train_log: results/asr_transformer_seg_char/train_log.txt
7
+ save_folder: results/asr_transformer_seg_char/save
8
+ ckpt_interval_minutes: 15
9
+ num_workers: 4
10
+
11
+ data_folder: results/prepare_seg
12
+ tokenizer_file: results/tokenizer_seg_bpe5k_char/5000_char.model
13
+ lm_file: results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00/model.ckpt
14
+
15
+ tokenizer: &id001 !new:sentencepiece.SentencePieceProcessor
16
+
17
+ pretrainer: !new:speechbrain.utils.parameter_transfer.Pretrainer
18
+ collect_in: results/asr_transformer_seg_char/save
19
+ loadables:
20
+ lm: &id003 !new:speechbrain.lobes.models.transformer.TransformerLM.TransformerLM
21
+ # yamllint disable-line rule:line-length
22
+ vocab: 5000
23
+ d_model: 576
24
+ nhead: 6
25
+ num_encoder_layers: 6
26
+ num_decoder_layers: 0
27
+ d_ffn: 1538
28
+ dropout: 0.2
29
+ activation: !name:torch.nn.GELU
30
+ normalize_before: false
31
+
32
+ tokenizer: *id001
33
+ paths:
34
+ lm: results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00/model.ckpt
35
+ tokenizer: results/tokenizer_seg_bpe5k_char/5000_char.model
36
+
37
+ train_logger: !new:speechbrain.utils.train_logger.FileTrainLogger
38
+ save_file: results/asr_transformer_seg_char/train_log.txt
39
+
40
+ # Feature parameters
41
+ sample_rate: 16000
42
+ n_fft: 400
43
+ n_mels: 80
44
+ hop_length: 10
45
+
46
+ compute_features: !new:speechbrain.lobes.features.Fbank
47
+ sample_rate: 16000
48
+ n_fft: 400
49
+ n_mels: 80
50
+ hop_length: 10
51
+
52
+ speed_perturb: !new:speechbrain.processing.speech_augmentation.SpeedPerturb
53
+ orig_freq: 16000
54
+ speeds: [90, 100, 110]
55
+
56
+ # Training parameters
57
+ number_of_epochs: 60
58
+ batch_size: 1
59
+ ctc_weight: 0.3
60
+ gradient_accumulation: 32
61
+ gradient_clipping: 5.0
62
+ loss_reduction: batchmean
63
+ sorting: random
64
+
65
+ # stages related parameters
66
+ stage_one_epochs: 40
67
+ lr_adam: 1.0
68
+ lr_sgd: 0.000025
69
+
70
+ # Dataloader options
71
+ train_dataloader_opts:
72
+ batch_size: 1
73
+ num_workers: 4
74
+ shuffle: true
75
+
76
+ valid_dataloader_opts:
77
+ batch_size: 1
78
+ num_workers: 4
79
+
80
+ test_dataloader_opts:
81
+ batch_size: 1
82
+ num_workers: 4
83
+
84
+ ####################### Model parameters ###########################
85
+ # Transformer
86
+ d_model: 256
87
+ nhead: 4
88
+ num_encoder_layers: 12
89
+ num_decoder_layers: 6
90
+ d_ffn: 2048
91
+ transformer_dropout: 0.1
92
+ activation: &id002 !name:torch.nn.GELU
93
+ output_neurons: 5000
94
+ vocab_size: 5000
95
+
96
+ # Outputs
97
+ blank_index: 0
98
+ label_smoothing: 0.1
99
+ pad_index: 0
100
+ bos_index: 1
101
+ eos_index: 2
102
+ unk_index: 0
103
+
104
+ # Decoding parameters
105
+ min_decode_ratio: 0.0
106
+ max_decode_ratio: 1.0 # 1.0
107
+ valid_search_interval: 10
108
+ valid_beam_size: 10
109
+ test_beam_size: 10
110
+ ctc_weight_decode: 0.30
111
+ lm_weight: 0.9
112
+
113
+ ############################## models ################################
114
+
115
+ CNN: &id004 !new:speechbrain.lobes.models.convolution.ConvolutionFrontEnd
116
+ input_shape: (8, 10, 80)
117
+ num_blocks: 2
118
+ num_layers_per_block: 1
119
+ out_channels: (256, 256)
120
+ kernel_sizes: (3, 3)
121
+ strides: (2, 2)
122
+ residuals: (False, False)
123
+
124
+ Transformer: &id005 !new:speechbrain.lobes.models.transformer.TransformerASR.TransformerASR
125
+ # yamllint disable-line rule:line-length
126
+ input_size: 5120
127
+ tgt_vocab: 5000
128
+ d_model: 256
129
+ nhead: 4
130
+ num_encoder_layers: 12
131
+ num_decoder_layers: 6
132
+ d_ffn: 2048
133
+ dropout: 0.1
134
+ activation: *id002
135
+ normalize_before: true
136
+
137
+ lm_model: *id003
138
+ ctc_lin: &id007 !new:speechbrain.nnet.linear.Linear
139
+ input_size: 256
140
+ n_neurons: 5000
141
+
142
+ seq_lin: &id006 !new:speechbrain.nnet.linear.Linear
143
+ input_size: 256
144
+ n_neurons: 5000
145
+
146
+ modules:
147
+ CNN: *id004
148
+ Transformer: *id005
149
+ seq_lin: *id006
150
+ ctc_lin: *id007
151
+ normalize: &id010 !new:speechbrain.processing.features.InputNormalization
152
+
153
+ norm_type: global
154
+ update_until_epoch: 4
155
+
156
+ model: &id008 !new:torch.nn.ModuleList
157
+ - [*id004, *id005, *id006, *id007]
158
+ Adam: !name:torch.optim.Adam
159
+ lr: 0
160
+ betas: (0.9, 0.98)
161
+ eps: 0.000000001
162
+
163
+ SGD: !name:torch.optim.SGD
164
+ lr: 0.000025
165
+ momentum: 0.99
166
+ nesterov: true
167
+
168
+ valid_search: !new:speechbrain.decoders.S2STransformerBeamSearch
169
+ modules: [*id005, *id006, *id007]
170
+ bos_index: 1
171
+ eos_index: 2
172
+ blank_index: 0
173
+ min_decode_ratio: 0.0
174
+ max_decode_ratio: 1.0
175
+ beam_size: 10
176
+ ctc_weight: 0.30
177
+ using_eos_threshold: false
178
+ length_normalization: true
179
+
180
+ test_search: !new:speechbrain.decoders.S2STransformerBeamSearch
181
+ modules: [*id005, *id006, *id007]
182
+ bos_index: 1
183
+ eos_index: 2
184
+ blank_index: 0
185
+ min_decode_ratio: 0.0
186
+ max_decode_ratio: 1.0
187
+ beam_size: 10
188
+ ctc_weight: 0.30
189
+ lm_weight: 0.9
190
+ lm_modules: *id003
191
+ temperature: 1.15
192
+ temperature_lm: 1.15
193
+ using_eos_threshold: false
194
+ length_normalization: true
195
+
196
+ log_softmax: !new:torch.nn.LogSoftmax
197
+ dim: -1
198
+
199
+ ctc_cost: !name:speechbrain.nnet.losses.ctc_loss
200
+ blank_index: 0
201
+ reduction: batchmean
202
+
203
+ seq_cost: !name:speechbrain.nnet.losses.kldiv_loss
204
+ label_smoothing: 0.1
205
+ reduction: batchmean
206
+
207
+ noam_annealing: &id009 !new:speechbrain.nnet.schedulers.NoamScheduler
208
+ lr_initial: 1.0
209
+ n_warmup_steps: 25000
210
+ model_size: 256
211
+
212
+ checkpointer: !new:speechbrain.utils.checkpoints.Checkpointer
213
+ checkpoints_dir: results/asr_transformer_seg_char/save
214
+ recoverables:
215
+ model: *id008
216
+ noam_scheduler: *id009
217
+ normalizer: *id010
218
+ counter: &id011 !new:speechbrain.utils.epoch_loop.EpochCounter
219
+
220
+ limit: 60
221
+
222
+ epoch_counter: *id011
223
+ normalize: *id010
224
+ augmentation: !new:speechbrain.lobes.augment.SpecAugment
225
+ time_warp: true
226
+ time_warp_window: 5
227
+ time_warp_mode: bicubic
228
+ freq_mask: true
229
+ n_freq_mask: 2
230
+ time_mask: true
231
+ n_time_mask: 2
232
+ replace_with_zero: false
233
+ freq_mask_width: 30
234
+ time_mask_width: 40
235
+
236
+ remove_spaces: true
237
+ split_tokens: &id012 !apply:operator.not_ [true]
238
+
239
+ cer_computer: !name:speechbrain.utils.metric_stats.ErrorRateStats
240
+ split_tokens: *id012
241
+ acc_computer: !name:speechbrain.utils.Accuracy.AccuracyStats
ASR-model/asr_transformer_seg_char_ctc0.3/log.txt ADDED
The diff for this file is too large to render. See raw diff
 
ASR-model/asr_transformer_seg_char_ctc0.3/results/asr_transformer_seg_char/env.log ADDED
@@ -0,0 +1,109 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ SpeechBrain system description
2
+ ==============================
3
+ Python version:
4
+ 3.8.16 (default, Jun 12 2023, 18:09:05)
5
+ [GCC 11.2.0]
6
+ ==============================
7
+ Installed Python packages:
8
+ antlr4-python3-runtime==4.8
9
+ anyio==3.7.0
10
+ astroid==2.15.5
11
+ bitarray==2.7.6
12
+ certifi==2023.5.7
13
+ cffi==1.15.1
14
+ cfgv==3.3.1
15
+ charset-normalizer==3.1.0
16
+ click==7.1.2
17
+ cmake==3.26.4
18
+ colorama==0.4.6
19
+ Cython==0.29.36
20
+ dill==0.3.6
21
+ distlib==0.3.6
22
+ exceptiongroup==1.1.2
23
+ fairseq @ git+https://github.com/facebookresearch/fairseq.git@31fba013a070eaff69dec8642e68e7134d60ab0f
24
+ fastapi==0.98.0
25
+ filelock==3.12.2
26
+ flake8==6.0.0
27
+ fsspec==2023.6.0
28
+ h11==0.14.0
29
+ httpcore==0.17.2
30
+ httptools==0.1.2
31
+ httpx==0.24.1
32
+ huggingface-hub==0.15.1
33
+ hydra-core==1.0.7
34
+ HyperPyYAML==1.2.1
35
+ identify==2.5.24
36
+ idna==3.4
37
+ importlib-resources==5.12.0
38
+ iniconfig==2.0.0
39
+ isort==5.12.0
40
+ jieba==0.42.1
41
+ Jinja2==3.1.2
42
+ joblib==1.3.1
43
+ lazy-object-proxy==1.9.0
44
+ lit==16.0.6
45
+ lxml==4.9.2
46
+ MarkupSafe==2.1.3
47
+ mccabe==0.7.0
48
+ mock==5.0.2
49
+ mpmath==1.3.0
50
+ networkx==3.1
51
+ nodeenv==1.8.0
52
+ numpy==1.24.4
53
+ nvidia-cublas-cu11==11.10.3.66
54
+ nvidia-cuda-cupti-cu11==11.7.101
55
+ nvidia-cuda-nvrtc-cu11==11.7.99
56
+ nvidia-cuda-runtime-cu11==11.7.99
57
+ nvidia-cudnn-cu11==8.5.0.96
58
+ nvidia-cufft-cu11==10.9.0.58
59
+ nvidia-curand-cu11==10.2.10.91
60
+ nvidia-cusolver-cu11==11.4.0.1
61
+ nvidia-cusparse-cu11==11.7.4.91
62
+ nvidia-nccl-cu11==2.14.3
63
+ nvidia-nvtx-cu11==11.7.91
64
+ omegaconf==2.0.6
65
+ packaging==23.1
66
+ platformdirs==3.8.0
67
+ pluggy==1.2.0
68
+ portalocker==2.7.0
69
+ pre-commit==3.3.3
70
+ pycodestyle==2.10.0
71
+ pycparser==2.21
72
+ pydantic==1.10.9
73
+ pyflakes==3.0.1
74
+ pylint==2.17.4
75
+ pytest==7.4.0
76
+ python-multipart==0.0.6
77
+ PyYAML==6.0
78
+ regex==2023.6.3
79
+ requests==2.31.0
80
+ ruamel.yaml==0.17.28
81
+ ruamel.yaml.clib==0.2.7
82
+ sacrebleu==2.3.1
83
+ scikit-learn==1.3.0
84
+ scipy==1.10.1
85
+ sentencepiece==0.1.99
86
+ sniffio==1.3.0
87
+ speechbrain==0.5.14
88
+ starlette==0.27.0
89
+ subword-nmt==0.3.8
90
+ sympy==1.12
91
+ tabulate==0.9.0
92
+ threadpoolctl==3.1.0
93
+ tomli==2.0.1
94
+ tomlkit==0.11.8
95
+ torch==2.0.1
96
+ torchaudio==2.0.2
97
+ tqdm==4.65.0
98
+ triton==2.0.0
99
+ typing_extensions==4.7.1
100
+ urllib3==2.0.3
101
+ uvicorn==0.11.3
102
+ uvloop==0.17.0
103
+ virtualenv==20.23.1
104
+ websockets==8.1
105
+ wrapt==1.15.0
106
+ zipp==3.15.0
107
+ ==============================
108
+ Could not get git revision==============================
109
+ CUDA not available
ASR-model/asr_transformer_seg_char_ctc0.3/results/asr_transformer_seg_char/hyperparams.yaml ADDED
@@ -0,0 +1,244 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Generated 2023-07-05 from:
2
+ # /mnt/md0/user_sinica_axel/MATBN/results/asr_transformer_seg_char_ctc0.3/hyperparams.yaml
3
+ # yamllint disable
4
+ # Generated 2021-10-11 from:
5
+ # /mnt/md0/user_wayne/speechbrain/recipes/MATBN/ASR/hparams/transformer_seg_LM_char_decode.yaml
6
+ # yamllint disable
7
+ output_folder: results/asr_transformer_seg_char
8
+ cer_file: results/asr_transformer_seg_char/cer.txt
9
+ train_log: results/asr_transformer_seg_char/train_log.txt
10
+ save_folder: results/asr_transformer_seg_char/save
11
+ ckpt_interval_minutes: 15
12
+ num_workers: 4
13
+
14
+ data_folder: results/prepare_seg
15
+ tokenizer_file: results/tokenizer_seg_bpe5k_char/5000_char.model
16
+ lm_file: results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00/model.ckpt
17
+
18
+ tokenizer: &id001 !new:sentencepiece.SentencePieceProcessor
19
+
20
+ pretrainer: !new:speechbrain.utils.parameter_transfer.Pretrainer
21
+ collect_in: results/asr_transformer_seg_char/save
22
+ loadables:
23
+ lm: &id003 !new:speechbrain.lobes.models.transformer.TransformerLM.TransformerLM
24
+ # yamllint disable-line rule:line-length
25
+ vocab: 5000
26
+ d_model: 576
27
+ nhead: 6
28
+ num_encoder_layers: 6
29
+ num_decoder_layers: 0
30
+ d_ffn: 1538
31
+ dropout: 0.2
32
+ activation: !name:torch.nn.GELU
33
+ normalize_before: false
34
+
35
+ tokenizer: *id001
36
+ paths:
37
+ lm: results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00/model.ckpt
38
+ tokenizer: results/tokenizer_seg_bpe5k_char/5000_char.model
39
+
40
+ train_logger: !new:speechbrain.utils.train_logger.FileTrainLogger
41
+ save_file: results/asr_transformer_seg_char/train_log.txt
42
+
43
+ # Feature parameters
44
+ sample_rate: 16000
45
+ n_fft: 400
46
+ n_mels: 80
47
+ hop_length: 10
48
+
49
+ compute_features: !new:speechbrain.lobes.features.Fbank
50
+ sample_rate: 16000
51
+ n_fft: 400
52
+ n_mels: 80
53
+ hop_length: 10
54
+
55
+ speed_perturb: !new:speechbrain.processing.speech_augmentation.SpeedPerturb
56
+ orig_freq: 16000
57
+ speeds: [90, 100, 110]
58
+
59
+ # Training parameters
60
+ number_of_epochs: 60
61
+ batch_size: 1
62
+ ctc_weight: 0.3
63
+ gradient_accumulation: 32
64
+ gradient_clipping: 5.0
65
+ loss_reduction: batchmean
66
+ sorting: random
67
+
68
+ # stages related parameters
69
+ stage_one_epochs: 40
70
+ lr_adam: 1.0
71
+ lr_sgd: 0.000025
72
+
73
+ # Dataloader options
74
+ train_dataloader_opts:
75
+ batch_size: 1
76
+ num_workers: 4
77
+ shuffle: true
78
+
79
+ valid_dataloader_opts:
80
+ batch_size: 1
81
+ num_workers: 4
82
+
83
+ test_dataloader_opts:
84
+ batch_size: 1
85
+ num_workers: 4
86
+
87
+ ####################### Model parameters ###########################
88
+ # Transformer
89
+ d_model: 256
90
+ nhead: 4
91
+ num_encoder_layers: 12
92
+ num_decoder_layers: 6
93
+ d_ffn: 2048
94
+ transformer_dropout: 0.1
95
+ activation: &id002 !name:torch.nn.GELU
96
+ output_neurons: 5000
97
+ vocab_size: 5000
98
+
99
+ # Outputs
100
+ blank_index: 0
101
+ label_smoothing: 0.1
102
+ pad_index: 0
103
+ bos_index: 1
104
+ eos_index: 2
105
+ unk_index: 0
106
+
107
+ # Decoding parameters
108
+ min_decode_ratio: 0.0
109
+ max_decode_ratio: 1.0 # 1.0
110
+ valid_search_interval: 10
111
+ valid_beam_size: 10
112
+ test_beam_size: 10
113
+ ctc_weight_decode: 0.30
114
+ lm_weight: 0.9
115
+
116
+ ############################## models ################################
117
+
118
+ CNN: &id004 !new:speechbrain.lobes.models.convolution.ConvolutionFrontEnd
119
+ input_shape: (8, 10, 80)
120
+ num_blocks: 2
121
+ num_layers_per_block: 1
122
+ out_channels: (256, 256)
123
+ kernel_sizes: (3, 3)
124
+ strides: (2, 2)
125
+ residuals: (False, False)
126
+
127
+ Transformer: &id005 !new:speechbrain.lobes.models.transformer.TransformerASR.TransformerASR
128
+ # yamllint disable-line rule:line-length
129
+ input_size: 5120
130
+ tgt_vocab: 5000
131
+ d_model: 256
132
+ nhead: 4
133
+ num_encoder_layers: 12
134
+ num_decoder_layers: 6
135
+ d_ffn: 2048
136
+ dropout: 0.1
137
+ activation: *id002
138
+ normalize_before: true
139
+
140
+ lm_model: *id003
141
+ ctc_lin: &id007 !new:speechbrain.nnet.linear.Linear
142
+ input_size: 256
143
+ n_neurons: 5000
144
+
145
+ seq_lin: &id006 !new:speechbrain.nnet.linear.Linear
146
+ input_size: 256
147
+ n_neurons: 5000
148
+
149
+ modules:
150
+ CNN: *id004
151
+ Transformer: *id005
152
+ seq_lin: *id006
153
+ ctc_lin: *id007
154
+ normalize: &id010 !new:speechbrain.processing.features.InputNormalization
155
+
156
+ norm_type: global
157
+ update_until_epoch: 4
158
+
159
+ model: &id008 !new:torch.nn.ModuleList
160
+ - [*id004, *id005, *id006, *id007]
161
+ Adam: !name:torch.optim.Adam
162
+ lr: 0
163
+ betas: (0.9, 0.98)
164
+ eps: 0.000000001
165
+
166
+ SGD: !name:torch.optim.SGD
167
+ lr: 0.000025
168
+ momentum: 0.99
169
+ nesterov: true
170
+
171
+ valid_search: !new:speechbrain.decoders.S2STransformerBeamSearch
172
+ modules: [*id005, *id006, *id007]
173
+ bos_index: 1
174
+ eos_index: 2
175
+ blank_index: 0
176
+ min_decode_ratio: 0.0
177
+ max_decode_ratio: 1.0
178
+ beam_size: 10
179
+ ctc_weight: 0.30
180
+ using_eos_threshold: false
181
+ length_normalization: true
182
+
183
+ test_search: !new:speechbrain.decoders.S2STransformerBeamSearch
184
+ modules: [*id005, *id006, *id007]
185
+ bos_index: 1
186
+ eos_index: 2
187
+ blank_index: 0
188
+ min_decode_ratio: 0.0
189
+ max_decode_ratio: 1.0
190
+ beam_size: 10
191
+ ctc_weight: 0.30
192
+ lm_weight: 0.9
193
+ lm_modules: *id003
194
+ temperature: 1.15
195
+ temperature_lm: 1.15
196
+ using_eos_threshold: false
197
+ length_normalization: true
198
+
199
+ log_softmax: !new:torch.nn.LogSoftmax
200
+ dim: -1
201
+
202
+ ctc_cost: !name:speechbrain.nnet.losses.ctc_loss
203
+ blank_index: 0
204
+ reduction: batchmean
205
+
206
+ seq_cost: !name:speechbrain.nnet.losses.kldiv_loss
207
+ label_smoothing: 0.1
208
+ reduction: batchmean
209
+
210
+ noam_annealing: &id009 !new:speechbrain.nnet.schedulers.NoamScheduler
211
+ lr_initial: 1.0
212
+ n_warmup_steps: 25000
213
+ model_size: 256
214
+
215
+ checkpointer: !new:speechbrain.utils.checkpoints.Checkpointer
216
+ checkpoints_dir: results/asr_transformer_seg_char/save
217
+ recoverables:
218
+ model: *id008
219
+ noam_scheduler: *id009
220
+ normalizer: *id010
221
+ counter: &id011 !new:speechbrain.utils.epoch_loop.EpochCounter
222
+
223
+ limit: 60
224
+
225
+ epoch_counter: *id011
226
+ normalize: *id010
227
+ augmentation: !new:speechbrain.lobes.augment.SpecAugment
228
+ time_warp: true
229
+ time_warp_window: 5
230
+ time_warp_mode: bicubic
231
+ freq_mask: true
232
+ n_freq_mask: 2
233
+ time_mask: true
234
+ n_time_mask: 2
235
+ replace_with_zero: false
236
+ freq_mask_width: 30
237
+ time_mask_width: 40
238
+
239
+ remove_spaces: true
240
+ split_tokens: &id012 !apply:operator.not_ [true]
241
+
242
+ cer_computer: !name:speechbrain.utils.metric_stats.ErrorRateStats
243
+ split_tokens: *id012
244
+ acc_computer: !name:speechbrain.utils.Accuracy.AccuracyStats
ASR-model/asr_transformer_seg_char_ctc0.3/results/asr_transformer_seg_char/log.txt ADDED
@@ -0,0 +1,120 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2023-07-05 14:57:01,987 - speechbrain.core - INFO - Beginning experiment!
2
+ 2023-07-05 14:57:01,987 - speechbrain.core - INFO - Experiment folder: results/asr_transformer_seg_char
3
+ 2023-07-05 14:57:02,208 - speechbrain.utils.superpowers - DEBUG - antlr4-python3-runtime==4.8
4
+ anyio==3.7.0
5
+ astroid==2.15.5
6
+ bitarray==2.7.6
7
+ certifi==2023.5.7
8
+ cffi==1.15.1
9
+ cfgv==3.3.1
10
+ charset-normalizer==3.1.0
11
+ click==7.1.2
12
+ cmake==3.26.4
13
+ colorama==0.4.6
14
+ Cython==0.29.36
15
+ dill==0.3.6
16
+ distlib==0.3.6
17
+ exceptiongroup==1.1.2
18
+ fairseq @ git+https://github.com/facebookresearch/fairseq.git@31fba013a070eaff69dec8642e68e7134d60ab0f
19
+ fastapi==0.98.0
20
+ filelock==3.12.2
21
+ flake8==6.0.0
22
+ fsspec==2023.6.0
23
+ h11==0.14.0
24
+ httpcore==0.17.2
25
+ httptools==0.1.2
26
+ httpx==0.24.1
27
+ huggingface-hub==0.15.1
28
+ hydra-core==1.0.7
29
+ HyperPyYAML==1.2.1
30
+ identify==2.5.24
31
+ idna==3.4
32
+ importlib-resources==5.12.0
33
+ iniconfig==2.0.0
34
+ isort==5.12.0
35
+ jieba==0.42.1
36
+ Jinja2==3.1.2
37
+ joblib==1.3.1
38
+ lazy-object-proxy==1.9.0
39
+ lit==16.0.6
40
+ lxml==4.9.2
41
+ MarkupSafe==2.1.3
42
+ mccabe==0.7.0
43
+ mock==5.0.2
44
+ mpmath==1.3.0
45
+ networkx==3.1
46
+ nodeenv==1.8.0
47
+ numpy==1.24.4
48
+ nvidia-cublas-cu11==11.10.3.66
49
+ nvidia-cuda-cupti-cu11==11.7.101
50
+ nvidia-cuda-nvrtc-cu11==11.7.99
51
+ nvidia-cuda-runtime-cu11==11.7.99
52
+ nvidia-cudnn-cu11==8.5.0.96
53
+ nvidia-cufft-cu11==10.9.0.58
54
+ nvidia-curand-cu11==10.2.10.91
55
+ nvidia-cusolver-cu11==11.4.0.1
56
+ nvidia-cusparse-cu11==11.7.4.91
57
+ nvidia-nccl-cu11==2.14.3
58
+ nvidia-nvtx-cu11==11.7.91
59
+ omegaconf==2.0.6
60
+ packaging==23.1
61
+ platformdirs==3.8.0
62
+ pluggy==1.2.0
63
+ portalocker==2.7.0
64
+ pre-commit==3.3.3
65
+ pycodestyle==2.10.0
66
+ pycparser==2.21
67
+ pydantic==1.10.9
68
+ pyflakes==3.0.1
69
+ pylint==2.17.4
70
+ pytest==7.4.0
71
+ python-multipart==0.0.6
72
+ PyYAML==6.0
73
+ regex==2023.6.3
74
+ requests==2.31.0
75
+ ruamel.yaml==0.17.28
76
+ ruamel.yaml.clib==0.2.7
77
+ sacrebleu==2.3.1
78
+ scikit-learn==1.3.0
79
+ scipy==1.10.1
80
+ sentencepiece==0.1.99
81
+ sniffio==1.3.0
82
+ speechbrain==0.5.14
83
+ starlette==0.27.0
84
+ subword-nmt==0.3.8
85
+ sympy==1.12
86
+ tabulate==0.9.0
87
+ threadpoolctl==3.1.0
88
+ tomli==2.0.1
89
+ tomlkit==0.11.8
90
+ torch==2.0.1
91
+ torchaudio==2.0.2
92
+ tqdm==4.65.0
93
+ triton==2.0.0
94
+ typing_extensions==4.7.1
95
+ urllib3==2.0.3
96
+ uvicorn==0.11.3
97
+ uvloop==0.17.0
98
+ virtualenv==20.23.1
99
+ websockets==8.1
100
+ wrapt==1.15.0
101
+ zipp==3.15.0
102
+
103
+
104
+ 2023-07-05 14:57:02,231 - speechbrain.utils.parameter_transfer - DEBUG - Collecting files (or symlinks) for pretraining in results/asr_transformer_seg_char/save.
105
+ 2023-07-05 14:57:02,231 - speechbrain.pretrained.fetching - INFO - Fetch model.ckpt: Delegating to Huggingface hub, source results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00.
106
+ 2023-07-05 14:57:02,290 - speechbrain.core - ERROR - Exception:
107
+ Traceback (most recent call last):
108
+ File "test.py", line 308, in <module>
109
+ run_on_main(hparams["pretrainer"].collect_files)
110
+ File "/home/sinica_axel/miniconda3/envs/asr_api/lib/python3.8/site-packages/speechbrain/utils/distributed.py", line 61, in run_on_main
111
+ func(*args, **kwargs)
112
+ File "/home/sinica_axel/miniconda3/envs/asr_api/lib/python3.8/site-packages/speechbrain/utils/parameter_transfer.py", line 202, in collect_files
113
+ path = fetch(
114
+ File "/home/sinica_axel/miniconda3/envs/asr_api/lib/python3.8/site-packages/speechbrain/pretrained/fetching.py", line 120, in fetch
115
+ fetched_file = huggingface_hub.hf_hub_download(
116
+ File "/home/sinica_axel/miniconda3/envs/asr_api/lib/python3.8/site-packages/huggingface_hub/utils/_validators.py", line 110, in _inner_fn
117
+ validate_repo_id(arg_value)
118
+ File "/home/sinica_axel/miniconda3/envs/asr_api/lib/python3.8/site-packages/huggingface_hub/utils/_validators.py", line 158, in validate_repo_id
119
+ raise HFValidationError(
120
+ huggingface_hub.utils._validators.HFValidationError: Repo id must be in the form 'repo_name' or 'namespace/repo_name': 'results/TransformerLM_seg_char/save/CKPT+2021-10-05+20-55-37+00'. Use `repo_type` argument if needed.
ASR-model/asr_transformer_seg_char_ctc0.3/results/asr_transformer_seg_char/test.py ADDED
@@ -0,0 +1,331 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sys
2
+
3
+ import torch
4
+ import speechbrain as sb
5
+ from speechbrain.utils.distributed import run_on_main
6
+ from hyperpyyaml import load_hyperpyyaml
7
+
8
+
9
class ASR(sb.core.Brain):
    """Transformer-based ASR Brain trained with a joint CTC + attention loss.

    Relies on objects declared in hyperparams.yaml (compute_features,
    normalize, CNN, Transformer, ctc_lin, seq_lin, beam searchers, metric
    factories, schedulers and the checkpointer).
    """

    def compute_forward(self, batch, stage):
        """Forward pass: features -> CNN front-end -> Transformer, returning
        CTC and seq2seq log-probabilities plus beam-search hypotheses when
        not training."""
        batch = batch.to(self.device)
        wavs, wavs_len = batch.sig
        tokens_bos, _ = batch.tokens_bos

        feats = self.hparams.compute_features(wavs)
        current_epoch = self.hparams.epoch_counter.current
        feats = self.hparams.normalize(feats, wavs_len, epoch=current_epoch)

        # if stage == sb.Stage.TRAIN:
        #     if hasattr(self.modules, "augmentation"):
        #         feats = self.hparams.augmentation(feats)

        src = self.modules.CNN(feats)
        enc_out, pred = self.modules.Transformer(
            src, tokens_bos, wavs_len, pad_idx=self.hparams.pad_index
        )

        # CTC branch over encoder states.
        logits = self.modules.ctc_lin(enc_out)
        p_ctc = self.hparams.log_softmax(logits)

        # Seq2seq branch over decoder states.
        # NOTE(review): seq_lin is fetched from self.hparams here but from
        # self.modules in train.py — confirm both resolve to the same layer
        # in hyperparams.yaml.
        pred = self.hparams.seq_lin(pred)
        p_seq = self.hparams.log_softmax(pred)

        hyps = None
        if stage == sb.Stage.TRAIN:
            hyps = None
        elif stage == sb.Stage.VALID:
            hyps = None
            current_epoch = self.hparams.epoch_counter.current
            if current_epoch % self.hparams.valid_search_interval == 0:
                # for the sake of efficiency, we only perform beamsearch with
                # limited capacity and no LM to give user some idea of how
                # the AM is doing
                hyps, _ = self.hparams.valid_search(enc_out.detach(), wavs_len)
        elif stage == sb.Stage.TEST:
            hyps, _ = self.hparams.test_search(enc_out.detach(), wavs_len)

        return p_ctc, p_seq, wavs_len, hyps

    def compute_objectives(self, predictions, batch, stage):
        """Combine CTC and attention losses; accumulate ACC/CER metrics when
        not training."""
        (p_ctc, p_seq, wavs_len, hyps,) = predictions

        ids = batch.id
        tokens_eos, tokens_eos_len = batch.tokens_eos
        tokens, tokens_len = batch.tokens

        attention_loss = self.hparams.seq_cost(
            p_seq, tokens_eos, tokens_eos_len
        )
        ctc_loss = self.hparams.ctc_cost(p_ctc, tokens, wavs_len, tokens_len)
        # Linear interpolation between the two objectives.
        loss = (
            self.hparams.ctc_weight * ctc_loss
            + (1 - self.hparams.ctc_weight) * attention_loss
        )

        if stage != sb.Stage.TRAIN:
            current_epoch = self.hparams.epoch_counter.current
            valid_search_interval = self.hparams.valid_search_interval

            # CER is only computed on epochs where beam search actually ran.
            if current_epoch % valid_search_interval == 0 or (
                stage == sb.Stage.TEST
            ):
                predictions = [
                    self.hparams.tokenizer.decode_ids(utt_seq).split(" ")
                    for utt_seq in hyps
                ]
                targets = [
                    transcription.split(" ")
                    for transcription in batch.transcription
                ]
                if self.hparams.remove_spaces:
                    # Character-level scoring: drop inter-word spaces.
                    predictions = [
                        "".join(prediction_words)
                        for prediction_words in predictions
                    ]
                    targets = [
                        "".join(target_words) for target_words in targets
                    ]
                self.cer_metric.append(ids, predictions, targets)

            self.acc_metric.append(p_seq, tokens_eos, tokens_eos_len)

        return loss

    def fit_batch(self, batch):
        """One training step with gradient accumulation and Noam LR update."""
        self.check_and_reset_optimizer()

        predictions = self.compute_forward(batch, sb.Stage.TRAIN)
        loss = self.compute_objectives(predictions, batch, sb.Stage.TRAIN)

        # Scale so accumulated gradients average over the virtual batch.
        (loss / self.hparams.gradient_accumulation).backward()

        if self.step % self.hparams.gradient_accumulation == 0:
            self.check_gradients(loss)

            self.optimizer.step()
            self.optimizer.zero_grad()

            self.hparams.noam_annealing(self.optimizer)

        return loss.detach()

    def evaluate_batch(self, batch, stage):
        """Evaluation step without gradients."""
        with torch.no_grad():
            predictions = self.compute_forward(batch, stage=stage)
            loss = self.compute_objectives(predictions, batch, stage=stage)
            # The base-class implementation returns loss.detach().cpu();
            # this override keeps the loss on-device.
            return loss.detach()

    def on_stage_start(self, stage, epoch):
        """Create fresh ACC/CER metric trackers for valid/test stages."""
        if stage != sb.Stage.TRAIN:
            self.acc_metric = self.hparams.acc_computer()
            self.cer_metric = self.hparams.cer_computer()

    def on_stage_end(self, stage, stage_loss, epoch):
        """Log stats and manage checkpoints at the end of a stage."""
        stage_stats = {"loss": stage_loss}
        if stage == sb.Stage.TRAIN:
            self.train_stats = stage_stats
        else:
            stage_stats["ACC"] = self.acc_metric.summarize()
            current_epoch = self.hparams.epoch_counter.current
            valid_search_interval = self.hparams.valid_search_interval
            if (
                current_epoch % valid_search_interval == 0
                or stage == sb.Stage.TEST
            ):
                stage_stats["CER"] = self.cer_metric.summarize("error_rate")

        if stage == sb.Stage.VALID and sb.utils.distributed.if_main_process():

            current_epoch = self.hparams.epoch_counter.current
            if current_epoch <= self.hparams.stage_one_epochs:
                lr = self.hparams.noam_annealing.current_lr
                steps = self.hparams.noam_annealing.n_steps
                optimizer = self.optimizer.__class__.__name__
            else:
                # After the Adam stage the LR is the fixed SGD one.
                lr = self.hparams.lr_sgd
                steps = -1
                optimizer = self.optimizer.__class__.__name__

            epoch_stats = {
                "epoch": epoch,
                "lr": lr,
                "steps": steps,
                "optimizer": optimizer,
            }
            self.hparams.train_logger.log_stats(
                stats_meta=epoch_stats,
                train_stats=self.train_stats,
                valid_stats=stage_stats,
            )
            self.checkpointer.save_and_keep_only(
                meta={"ACC": stage_stats["ACC"], "epoch": epoch},
                max_keys=["ACC"],
                num_to_keep=10,
            )

        elif stage == sb.Stage.TEST:
            self.hparams.train_logger.log_stats(
                stats_meta={"Epoch loaded": self.hparams.epoch_counter.current},
                test_stats=stage_stats,
            )
            with open(self.hparams.cer_file, "w") as cer_file:
                self.cer_metric.write_stats(cer_file)

            # ACC=1.1 outranks any real accuracy, so the checkpoint saved
            # here (after checkpoint averaging) becomes the single one kept.
            self.checkpointer.save_and_keep_only(
                meta={"ACC": 1.1, "epoch": epoch},
                max_keys=["ACC"],
                num_to_keep=1,
            )

    def check_and_reset_optimizer(self):
        """Swap Adam -> SGD once past stage_one_epochs (at most once)."""
        current_epoch = self.hparams.epoch_counter.current
        if not hasattr(self, "switched"):
            self.switched = False
            if isinstance(self.optimizer, torch.optim.SGD):
                self.switched = True

        if self.switched is True:
            return

        if current_epoch > self.hparams.stage_one_epochs:
            self.optimizer = self.hparams.SGD(self.modules.parameters())

            if self.checkpointer is not None:
                self.checkpointer.add_recoverable("optimizer", self.optimizer)

            self.switched = True

    def on_fit_start(self):
        """Initialize the right optimizer on the training start"""
        super().on_fit_start()

        current_epoch = self.hparams.epoch_counter.current
        current_optimizer = self.optimizer
        if current_epoch > self.hparams.stage_one_epochs:
            del self.optimizer
            self.optimizer = self.hparams.SGD(self.modules.parameters())

            # Only recover optimizer state when the recovered optimizer is
            # SGD-like (has momentum); otherwise leave the fresh one as-is.
            if self.checkpointer is not None:
                group = current_optimizer.param_groups[0]
                if "momentum" not in group:
                    return
                self.checkpointer.recover_if_possible(
                    device=torch.device(self.device)
                )

    def on_evaluate_start(self, max_key=None, min_key=None):
        """Average the best checkpoints' model weights before evaluation."""
        super().on_evaluate_start()

        checkpointer = self.checkpointer.find_checkpoints(
            max_key=max_key, min_key=min_key
        )
        checkpointer = sb.utils.checkpoints.average_checkpoints(
            checkpointer, recoverable_name="model", device=self.device
        )

        self.hparams.model.load_state_dict(checkpointer, strict=True)
        self.hparams.model.eval()
230
+
231
+
232
def dataio_prepare(hparams):
    """Build train/dev/test DynamicItemDatasets from JSON manifests.

    Returns a dict keyed by "train", "dev", "test". The train set applies
    speed perturbation to the audio; the other splits read it unchanged.
    """

    @sb.utils.data_pipeline.takes("transcription")
    @sb.utils.data_pipeline.provides(
        "transcription", "tokens_bos", "tokens_eos", "tokens"
    )
    def transcription_pipline(transcription):
        # Tokenize once, then provide BOS-/EOS-augmented token tensors.
        yield transcription
        tokens_list = hparams["tokenizer"].encode_as_ids(transcription)
        tokens_bos = torch.LongTensor([hparams["bos_index"]] + (tokens_list))
        yield tokens_bos
        tokens_eos = torch.LongTensor(tokens_list + [hparams["eos_index"]])
        yield tokens_eos
        tokens = torch.LongTensor(tokens_list)
        yield tokens

    @sb.utils.data_pipeline.takes("wav")
    @sb.utils.data_pipeline.provides("sig")
    def audio_pipline(wav):
        # Plain audio loading (dev/test).
        sig = sb.dataio.dataio.read_audio(wav)
        return sig

    @sb.utils.data_pipeline.takes("wav")
    @sb.utils.data_pipeline.provides("sig")
    def sp_audio_pipline(wav):
        # Audio loading with speed perturbation (train only); the
        # perturbation module expects a leading batch dimension.
        sig = sb.dataio.dataio.read_audio(wav)
        sig = sig.unsqueeze(0)
        sig = hparams["speed_perturb"](sig)
        sig = sig.squeeze(0)
        return sig

    datasets = {}
    data_folder = hparams["data_folder"]
    output_keys = [
        "transcription",
        "tokens_bos",
        "tokens_eos",
        "tokens",
        "sig",
        "id",
    ]
    default_dynamic_items = [transcription_pipline, audio_pipline]
    train_dynamic_item = [transcription_pipline, sp_audio_pipline]

    for dataset_name in ["train", "dev", "test"]:
        if dataset_name == "train":
            dynamic_items = train_dynamic_item
        else:
            dynamic_items = default_dynamic_items

        # Manifests are expected at <data_folder>/<split>.json.
        json_path = f"{data_folder}/{dataset_name}.json"
        datasets[dataset_name] = sb.dataio.dataset.DynamicItemDataset.from_json(
            json_path=json_path,
            replacements={"data_root": data_folder},
            dynamic_items=dynamic_items,
            output_keys=output_keys,
        )

    return datasets
290
+
291
+
292
if __name__ == "__main__":
    # Command-line parsing is bypassed: this copy always evaluates with the
    # local hyperparams.yaml on CUDA.
    # hparams_file_path, run_opts, overrides = sb.parse_arguments(sys.argv[1:])
    hparams_file_path = "hyperparams.yaml"
    run_opts = {"device": "cuda", "distributed_launch": False}
    overrides = None
    with open(hparams_file_path) as hparams_file:
        hparams = load_hyperpyyaml(hparams_file, overrides)

    sb.utils.distributed.ddp_init_group(run_opts)

    sb.create_experiment_directory(
        experiment_directory=hparams["output_folder"],
        hyperparams_to_save=hparams_file_path,
        overrides=overrides,
    )

    # Fetch pretrained components declared by the yaml pretrainer.
    # NOTE(review): per the error log captured earlier in this commit, the
    # pretrainer paths must exist locally — a non-existent relative path is
    # delegated to the Huggingface hub and fails repo-id validation.
    run_on_main(hparams["pretrainer"].collect_files)
    hparams["pretrainer"].load_collected(device=run_opts["device"])

    datasets = dataio_prepare(hparams)

    asr_brain = ASR(
        modules=hparams["modules"],
        opt_class=hparams["Adam"],
        hparams=hparams,
        run_opts=run_opts,
        checkpointer=hparams["checkpointer"],
    )

    # Training is disabled in this copy; the script only evaluates.
    # asr_brain.fit(
    #     asr_brain.hparams.epoch_counter,
    #     datasets["train"],
    #     datasets["dev"],
    #     train_loader_kwargs=hparams["train_dataloader_opts"],
    #     valid_loader_kwargs=hparams["valid_dataloader_opts"],
    # )

    asr_brain.evaluate(
        datasets["test"], max_key="ACC", test_loader_kwargs=hparams["test_dataloader_opts"]
    )
ASR-model/asr_transformer_seg_char_ctc0.3/save/CKPT+2021-10-11+14-03-15+00/CKPT.yaml ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ # yamllint disable
2
+ ACC: 1.1
3
+ end-of-epoch: true
4
+ epoch: null
5
+ unixtime: 1633932195.1636646
ASR-model/asr_transformer_seg_char_ctc0.3/save/CKPT+2021-10-11+14-03-15+00/brain.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:22e854e25aa2ad065885cf277f6017bfa7ab555eb84131dfa2da35605cb8fb14
3
+ size 32
ASR-model/asr_transformer_seg_char_ctc0.3/save/CKPT+2021-10-11+14-03-15+00/counter.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:39fa9ec190eee7b6f4dff1100d6343e10918d044c75eac8f9e9a2596173f80c9
3
+ size 2
ASR-model/asr_transformer_seg_char_ctc0.3/save/CKPT+2021-10-11+14-03-15+00/model.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9f325dfc2ecffc1332882e84cac08a9f05e500253413c2346a20f35baaeed705
3
+ size 126713830
ASR-model/asr_transformer_seg_char_ctc0.3/save/CKPT+2021-10-11+14-03-15+00/noam_scheduler.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:35ce303ccbe8d1abb2b87a01a29e01ed6a7b345253ab6db9111cbd5eefbf503c
3
+ size 431
ASR-model/asr_transformer_seg_char_ctc0.3/save/CKPT+2021-10-11+14-03-15+00/normalizer.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3c4aa56041d2ead93019a7781f786d29cc577aaf24f4766574c47867a2078f80
3
+ size 1782
ASR-model/asr_transformer_seg_char_ctc0.3/test.py ADDED
@@ -0,0 +1,181 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sys
2
+
3
+ import torch
4
+ import speechbrain as sb
5
+ from speechbrain.utils.distributed import run_on_main
6
+ from hyperpyyaml import load_hyperpyyaml
7
+
8
+
9
class ASR(sb.core.Brain):
    """Trimmed Transformer ASR Brain (inference-oriented copy).

    NOTE(review): unlike the sibling train.py, this copy does not override
    compute_objectives, yet evaluate_batch calls it — confirm this file is
    only used to construct the Brain for decoding, not to run evaluate().
    """

    def compute_forward(self, batch, stage):
        """Forward pass: features -> CNN front-end -> Transformer, returning
        CTC and seq2seq log-probabilities plus beam-search hypotheses when
        not training."""
        batch = batch.to(self.device)
        wavs, wavs_len = batch.sig
        tokens_bos, _ = batch.tokens_bos

        feats = self.hparams.compute_features(wavs)
        current_epoch = self.hparams.epoch_counter.current
        feats = self.hparams.normalize(feats, wavs_len, epoch=current_epoch)

        # if stage == sb.Stage.TRAIN:
        #     if hasattr(self.modules, "augmentation"):
        #         feats = self.hparams.augmentation(feats)

        src = self.modules.CNN(feats)
        enc_out, pred = self.modules.Transformer(
            src, tokens_bos, wavs_len, pad_idx=self.hparams.pad_index
        )

        # CTC branch over encoder states.
        logits = self.modules.ctc_lin(enc_out)
        p_ctc = self.hparams.log_softmax(logits)

        # Seq2seq branch over decoder states.
        pred = self.hparams.seq_lin(pred)
        p_seq = self.hparams.log_softmax(pred)

        hyps = None
        if stage == sb.Stage.TRAIN:
            hyps = None
        elif stage == sb.Stage.VALID:
            hyps = None
            current_epoch = self.hparams.epoch_counter.current
            if current_epoch % self.hparams.valid_search_interval == 0:
                # for the sake of efficiency, we only perform beamsearch with
                # limited capacity and no LM to give user some idea of how
                # the AM is doing
                hyps, _ = self.hparams.valid_search(enc_out.detach(), wavs_len)
        elif stage == sb.Stage.TEST:
            hyps, _ = self.hparams.test_search(enc_out.detach(), wavs_len)

        return p_ctc, p_seq, wavs_len, hyps

    def evaluate_batch(self, batch, stage):
        """Evaluation step without gradients."""
        with torch.no_grad():
            predictions = self.compute_forward(batch, stage=stage)
            # NOTE(review): compute_objectives is not defined in this class;
            # this call would resolve to the base class — verify before use.
            loss = self.compute_objectives(predictions, batch, stage=stage)
            # The base-class implementation returns loss.detach().cpu();
            # this override keeps the loss on-device.
            return loss.detach()

    def on_stage_start(self, stage, epoch):
        """Create fresh ACC/CER metric trackers for valid/test stages."""
        if stage != sb.Stage.TRAIN:
            self.acc_metric = self.hparams.acc_computer()
            self.cer_metric = self.hparams.cer_computer()

    def on_stage_end(self, stage, stage_loss, epoch):
        """Log stats and manage checkpoints at the end of a stage."""
        stage_stats = {"loss": stage_loss}
        if stage == sb.Stage.TRAIN:
            self.train_stats = stage_stats
        else:
            stage_stats["ACC"] = self.acc_metric.summarize()
            current_epoch = self.hparams.epoch_counter.current
            valid_search_interval = self.hparams.valid_search_interval
            if (
                current_epoch % valid_search_interval == 0
                or stage == sb.Stage.TEST
            ):
                stage_stats["CER"] = self.cer_metric.summarize("error_rate")

        if stage == sb.Stage.VALID and sb.utils.distributed.if_main_process():

            current_epoch = self.hparams.epoch_counter.current
            if current_epoch <= self.hparams.stage_one_epochs:
                lr = self.hparams.noam_annealing.current_lr
                steps = self.hparams.noam_annealing.n_steps
                optimizer = self.optimizer.__class__.__name__
            else:
                # After the Adam stage the LR is the fixed SGD one.
                lr = self.hparams.lr_sgd
                steps = -1
                optimizer = self.optimizer.__class__.__name__

            epoch_stats = {
                "epoch": epoch,
                "lr": lr,
                "steps": steps,
                "optimizer": optimizer,
            }
            self.hparams.train_logger.log_stats(
                stats_meta=epoch_stats,
                train_stats=self.train_stats,
                valid_stats=stage_stats,
            )
            self.checkpointer.save_and_keep_only(
                meta={"ACC": stage_stats["ACC"], "epoch": epoch},
                max_keys=["ACC"],
                num_to_keep=10,
            )

        elif stage == sb.Stage.TEST:
            self.hparams.train_logger.log_stats(
                stats_meta={"Epoch loaded": self.hparams.epoch_counter.current},
                test_stats=stage_stats,
            )
            with open(self.hparams.cer_file, "w") as cer_file:
                self.cer_metric.write_stats(cer_file)

            # ACC=1.1 outranks any real accuracy, so the checkpoint saved
            # here (after checkpoint averaging) becomes the single one kept.
            self.checkpointer.save_and_keep_only(
                meta={"ACC": 1.1, "epoch": epoch},
                max_keys=["ACC"],
                num_to_keep=1,
            )

    def check_and_reset_optimizer(self):
        """Swap Adam -> SGD once past stage_one_epochs (at most once)."""
        current_epoch = self.hparams.epoch_counter.current
        if not hasattr(self, "switched"):
            self.switched = False
            if isinstance(self.optimizer, torch.optim.SGD):
                self.switched = True

        if self.switched is True:
            return

        if current_epoch > self.hparams.stage_one_epochs:
            self.optimizer = self.hparams.SGD(self.modules.parameters())

            if self.checkpointer is not None:
                self.checkpointer.add_recoverable("optimizer", self.optimizer)

            self.switched = True

    def on_fit_start(self):
        """Initialize the right optimizer on the training start"""
        super().on_fit_start()

        current_epoch = self.hparams.epoch_counter.current
        current_optimizer = self.optimizer
        if current_epoch > self.hparams.stage_one_epochs:
            del self.optimizer
            self.optimizer = self.hparams.SGD(self.modules.parameters())

            # Only recover optimizer state when the recovered optimizer is
            # SGD-like (has momentum); otherwise leave the fresh one as-is.
            if self.checkpointer is not None:
                group = current_optimizer.param_groups[0]
                if "momentum" not in group:
                    return
                self.checkpointer.recover_if_possible(
                    device=torch.device(self.device)
                )

    def on_evaluate_start(self, max_key=None, min_key=None):
        """Average the best checkpoints' model weights before evaluation."""
        super().on_evaluate_start()

        checkpointer = self.checkpointer.find_checkpoints(
            max_key=max_key, min_key=min_key
        )
        checkpointer = sb.utils.checkpoints.average_checkpoints(
            checkpointer, recoverable_name="model", device=self.device
        )

        self.hparams.model.load_state_dict(checkpointer, strict=True)
        self.hparams.model.eval()
166
+
167
+
168
if __name__ == "__main__":
    # Build the Brain from the local yaml (CUDA, no DDP); no training or
    # evaluation is launched in this copy.
    hparams_file_path = "hyperparams.yaml"
    run_opts = {"device": "cuda", "distributed_launch": False}
    with open(hparams_file_path) as hparams_file:
        hparams = load_hyperpyyaml(hparams_file)

    asr_brain = ASR(
        modules=hparams["modules"],
        opt_class=hparams["Adam"],
        hparams=hparams,
        run_opts=run_opts,
        checkpointer=hparams["checkpointer"],
    )
181
+
ASR-model/asr_transformer_seg_char_ctc0.3/test.wav ADDED
Binary file (263 kB). View file
 
ASR-model/asr_transformer_seg_char_ctc0.3/train.py ADDED
@@ -0,0 +1,322 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sys
2
+
3
+ import torch
4
+ import speechbrain as sb
5
+ from speechbrain.utils.distributed import run_on_main
6
+ from hyperpyyaml import load_hyperpyyaml
7
+
8
+
9
class ASR(sb.core.Brain):
    """Transformer ASR Brain trained with a joint CTC + attention objective.

    Stage one trains with Adam under Noam LR annealing; after
    ``stage_one_epochs`` the optimizer is switched to SGD (see
    check_and_reset_optimizer / on_fit_start).
    """

    def compute_forward(self, batch, stage):
        """Forward pass: features -> CNN front-end -> Transformer, returning
        CTC and seq2seq log-probabilities plus beam-search hypotheses when
        not training."""
        batch = batch.to(self.device)
        wavs, wavs_len = batch.sig
        tokens_bos, _ = batch.tokens_bos
        feats = self.hparams.compute_features(wavs)
        current_epoch = self.hparams.epoch_counter.current
        feats = self.modules.normalize(feats, wavs_len, epoch=current_epoch)

        src = self.modules.CNN(feats)
        enc_out, pred = self.modules.Transformer(
            src, tokens_bos, wavs_len, pad_idx=self.hparams.pad_index
        )

        # CTC branch over encoder states.
        logits = self.modules.ctc_lin(enc_out)
        p_ctc = self.hparams.log_softmax(logits)

        # Seq2seq branch over decoder states.
        pred = self.modules.seq_lin(pred)
        p_seq = self.hparams.log_softmax(pred)

        hyps = None
        if stage == sb.Stage.TRAIN:
            hyps = None
        elif stage == sb.Stage.VALID:
            hyps = None
            current_epoch = self.hparams.epoch_counter.current
            # Beam search is expensive: only decode on validation epochs
            # that are multiples of valid_search_interval.
            if current_epoch % self.hparams.valid_search_interval == 0:
                hyps, _ = self.hparams.valid_search(enc_out.detach(), wavs_len)
        elif stage == sb.Stage.TEST:
            hyps, _ = self.hparams.test_search(enc_out.detach(), wavs_len)

        return p_ctc, p_seq, wavs_len, hyps

    def compute_objectives(self, predictions, batch, stage):
        """Combine CTC and attention losses; accumulate ACC/CER metrics when
        not training."""
        (p_ctc, p_seq, wavs_len, hyps,) = predictions

        ids = batch.id
        tokens_eos, tokens_eos_len = batch.tokens_eos
        tokens, tokens_len = batch.tokens

        attention_loss = self.hparams.seq_cost(
            p_seq, tokens_eos, length=tokens_eos_len
        )
        ctc_loss = self.hparams.ctc_cost(p_ctc, tokens, wavs_len, tokens_len)
        # Linear interpolation between the two objectives.
        loss = (
            self.hparams.ctc_weight * ctc_loss
            + (1 - self.hparams.ctc_weight) * attention_loss
        )

        if stage != sb.Stage.TRAIN:
            current_epoch = self.hparams.epoch_counter.current
            valid_search_interval = self.hparams.valid_search_interval

            # CER is only computed on epochs where beam search actually ran.
            if current_epoch % valid_search_interval == 0 or (
                stage == sb.Stage.TEST
            ):
                # FIX: use the tokenizer attached to this Brain instead of
                # the module-level global ``hparams`` (which the original
                # read here) — the global form breaks if this module is
                # imported, and the sibling test.py already uses
                # self.hparams.tokenizer.
                predictions = [
                    self.hparams.tokenizer.decode_ids(utt_seq).split(" ")
                    for utt_seq in hyps
                ]
                targets = [
                    transcription.split(" ")
                    for transcription in batch.transcription
                ]
                if self.hparams.remove_spaces:
                    # Character-level scoring: drop inter-word spaces.
                    predictions = [
                        "".join(prediction_words)
                        for prediction_words in predictions
                    ]
                    targets = [
                        "".join(target_words) for target_words in targets
                    ]
                self.cer_metric.append(ids, predictions, targets)

            self.acc_metric.append(p_seq, tokens_eos, tokens_eos_len)

        return loss

    def fit_batch(self, batch):
        """One training step with gradient accumulation and Noam LR update."""
        self.check_and_reset_optimizer()

        predictions = self.compute_forward(batch, sb.Stage.TRAIN)
        loss = self.compute_objectives(predictions, batch, sb.Stage.TRAIN)

        # Scale so accumulated gradients average over the virtual batch.
        (loss / self.hparams.gradient_accumulation).backward()

        if self.step % self.hparams.gradient_accumulation == 0:
            self.check_gradients(loss)

            self.optimizer.step()
            self.optimizer.zero_grad()

            self.hparams.noam_annealing(self.optimizer)

        return loss.detach()

    def evaluate_batch(self, batch, stage):
        """Evaluation step without gradients."""
        with torch.no_grad():
            predictions = self.compute_forward(batch, stage=stage)
            loss = self.compute_objectives(predictions, batch, stage=stage)
            # The base-class implementation returns loss.detach().cpu();
            # this override keeps the loss on-device.
            return loss.detach()

    def on_stage_start(self, stage, epoch):
        """Create fresh ACC/CER metric trackers for valid/test stages."""
        if stage != sb.Stage.TRAIN:
            self.acc_metric = self.hparams.acc_computer()
            self.cer_metric = self.hparams.cer_computer()

    def on_stage_end(self, stage, stage_loss, epoch):
        """Log stats and manage checkpoints at the end of a stage."""
        stage_stats = {"loss": stage_loss}
        if stage == sb.Stage.TRAIN:
            self.train_stats = stage_stats
        else:
            stage_stats["ACC"] = self.acc_metric.summarize()
            current_epoch = self.hparams.epoch_counter.current
            valid_search_interval = self.hparams.valid_search_interval
            if (
                current_epoch % valid_search_interval == 0
                or stage == sb.Stage.TEST
            ):
                stage_stats["CER"] = self.cer_metric.summarize("error_rate")

        if stage == sb.Stage.VALID and sb.utils.distributed.if_main_process():

            current_epoch = self.hparams.epoch_counter.current
            if current_epoch <= self.hparams.stage_one_epochs:
                lr = self.hparams.noam_annealing.current_lr
                steps = self.hparams.noam_annealing.n_steps
                optimizer = self.optimizer.__class__.__name__
            else:
                # After the Adam stage the LR is the fixed SGD one.
                lr = self.hparams.lr_sgd
                steps = -1
                optimizer = self.optimizer.__class__.__name__

            epoch_stats = {
                "epoch": epoch,
                "lr": lr,
                "steps": steps,
                "optimizer": optimizer,
            }
            self.hparams.train_logger.log_stats(
                stats_meta=epoch_stats,
                train_stats=self.train_stats,
                valid_stats=stage_stats,
            )
            self.checkpointer.save_and_keep_only(
                meta={"ACC": stage_stats["ACC"], "epoch": epoch},
                max_keys=["ACC"],
                num_to_keep=10,
            )

        elif stage == sb.Stage.TEST:
            self.hparams.train_logger.log_stats(
                stats_meta={"Epoch loaded": self.hparams.epoch_counter.current},
                test_stats=stage_stats,
            )
            with open(self.hparams.cer_file, "w") as cer_file:
                self.cer_metric.write_stats(cer_file)

            # ACC=1.1 outranks any real accuracy, so the checkpoint saved
            # here (after checkpoint averaging) becomes the single one kept.
            self.checkpointer.save_and_keep_only(
                meta={"ACC": 1.1, "epoch": epoch},
                max_keys=["ACC"],
                num_to_keep=1,
            )

    def check_and_reset_optimizer(self):
        """Swap Adam -> SGD once past stage_one_epochs (at most once)."""
        current_epoch = self.hparams.epoch_counter.current
        if not hasattr(self, "switched"):
            self.switched = False
            if isinstance(self.optimizer, torch.optim.SGD):
                self.switched = True

        if self.switched is True:
            return

        if current_epoch > self.hparams.stage_one_epochs:
            self.optimizer = self.hparams.SGD(self.modules.parameters())

            if self.checkpointer is not None:
                self.checkpointer.add_recoverable("optimizer", self.optimizer)

            self.switched = True

    def on_fit_start(self):
        """Initialize the right optimizer on the training start"""
        super().on_fit_start()

        current_epoch = self.hparams.epoch_counter.current
        current_optimizer = self.optimizer
        if current_epoch > self.hparams.stage_one_epochs:
            del self.optimizer
            self.optimizer = self.hparams.SGD(self.modules.parameters())

            # Only recover optimizer state when the recovered optimizer is
            # SGD-like (has momentum); otherwise leave the fresh one as-is.
            if self.checkpointer is not None:
                group = current_optimizer.param_groups[0]
                if "momentum" not in group:
                    return
                self.checkpointer.recover_if_possible(
                    device=torch.device(self.device)
                )

    def on_evaluate_start(self, max_key=None, min_key=None):
        """Average the best checkpoints' model weights before evaluation."""
        super().on_evaluate_start()

        checkpointers = self.checkpointer.find_checkpoints(
            max_key=max_key, min_key=min_key
        )
        checkpointer = sb.utils.checkpoints.average_checkpoints(
            checkpointers, recoverable_name="model", device=self.device
        )

        self.hparams.model.load_state_dict(checkpointer, strict=True)
        self.hparams.model.eval()
223
+
224
+
225
def dataio_prepare(hparams):
    """Build train/dev/test DynamicItemDatasets from JSON manifests.

    Returns a dict keyed by "train", "dev", "test". The train set applies
    speed perturbation to the audio; the other splits read it unchanged.
    """

    @sb.utils.data_pipeline.takes("transcription")
    @sb.utils.data_pipeline.provides(
        "transcription", "tokens_bos", "tokens_eos", "tokens"
    )
    def transcription_pipline(transcription):
        # Tokenize once, then provide BOS-/EOS-augmented token tensors.
        yield transcription
        tokens_list = hparams["tokenizer"].encode_as_ids(transcription)
        tokens_bos = torch.LongTensor([hparams["bos_index"]] + (tokens_list))
        yield tokens_bos
        tokens_eos = torch.LongTensor(tokens_list + [hparams["eos_index"]])
        yield tokens_eos
        tokens = torch.LongTensor(tokens_list)
        yield tokens

    @sb.utils.data_pipeline.takes("wav")
    @sb.utils.data_pipeline.provides("sig")
    def audio_pipline(wav):
        # Plain audio loading (dev/test).
        sig = sb.dataio.dataio.read_audio(wav)
        return sig

    @sb.utils.data_pipeline.takes("wav")
    @sb.utils.data_pipeline.provides("sig")
    def sp_audio_pipline(wav):
        # Audio loading with speed perturbation (train only); the
        # perturbation module expects a leading batch dimension.
        sig = sb.dataio.dataio.read_audio(wav)
        sig = sig.unsqueeze(0)
        sig = hparams["speed_perturb"](sig)
        sig = sig.squeeze(0)
        return sig

    datasets = {}
    data_folder = hparams["data_folder"]
    output_keys = [
        "transcription",
        "tokens_bos",
        "tokens_eos",
        "tokens",
        "sig",
        "id",
    ]
    default_dynamic_items = [transcription_pipline, audio_pipline]
    train_dynamic_item = [transcription_pipline, sp_audio_pipline]

    for dataset_name in ["train", "dev", "test"]:
        if dataset_name == "train":
            dynamic_items = train_dynamic_item
        else:
            dynamic_items = default_dynamic_items

        # Manifests are expected at <data_folder>/<split>.json.
        json_path = f"{data_folder}/{dataset_name}.json"
        datasets[dataset_name] = sb.dataio.dataset.DynamicItemDataset.from_json(
            json_path=json_path,
            replacements={"data_root": data_folder},
            dynamic_items=dynamic_items,
            output_keys=output_keys,
        )

    return datasets
283
+
284
+
285
if __name__ == "__main__":
    # Standard SpeechBrain entry point: yaml path and run options come from
    # the command line.
    hparams_file_path, run_opts, overrides = sb.parse_arguments(sys.argv[1:])

    sb.utils.distributed.ddp_init_group(run_opts)

    with open(hparams_file_path) as hparams_file:
        hparams = load_hyperpyyaml(hparams_file, overrides)

    sb.create_experiment_directory(
        experiment_directory=hparams["output_folder"],
        hyperparams_to_save=hparams_file_path,
        overrides=overrides,
    )

    # Fetch pretrained assets (e.g. tokenizer / LM) declared by the yaml
    # pretrainer, then load them into the corresponding modules.
    run_on_main(hparams["pretrainer"].collect_files)
    hparams["pretrainer"].load_collected(device=run_opts["device"])

    datasets = dataio_prepare(hparams)

    asr_brain = ASR(
        modules=hparams["modules"],
        opt_class=hparams["Adam"],
        hparams=hparams,
        run_opts=run_opts,
        checkpointer=hparams["checkpointer"],
    )

    asr_brain.fit(
        asr_brain.hparams.epoch_counter,
        datasets["train"],
        datasets["dev"],
        train_loader_kwargs=hparams["train_dataloader_opts"],
        valid_loader_kwargs=hparams["valid_dataloader_opts"],
    )

    # Evaluation is run separately (see test.py).
    # asr_brain.evaluate(
    #     datasets["test"],max_key="ACC", test_loader_kwargs=hparams["test_dataloader_opts"]
    # )
ASR-model/asr_transformer_seg_char_ctc0.3/train_log.txt ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ epoch: 1, lr: 2.40e-05, steps: 1520, optimizer: Adam - train loss: 3.41e+02 - valid loss: 2.52e+02, valid ACC: 4.07e-01
2
+ epoch: 2, lr: 4.81e-05, steps: 3040, optimizer: Adam - train loss: 2.36e+02 - valid loss: 2.26e+02, valid ACC: 4.36e-01
3
+ epoch: 3, lr: 7.21e-05, steps: 4560, optimizer: Adam - train loss: 2.17e+02 - valid loss: 2.12e+02, valid ACC: 4.66e-01
4
+ epoch: 4, lr: 9.61e-05, steps: 6080, optimizer: Adam - train loss: 2.05e+02 - valid loss: 2.02e+02, valid ACC: 4.86e-01
5
+ epoch: 5, lr: 1.20e-04, steps: 7600, optimizer: Adam - train loss: 1.96e+02 - valid loss: 1.92e+02, valid ACC: 4.98e-01
6
+ epoch: 6, lr: 1.44e-04, steps: 9120, optimizer: Adam - train loss: 1.70e+02 - valid loss: 1.51e+02, valid ACC: 5.30e-01
7
+ epoch: 7, lr: 1.68e-04, steps: 10640, optimizer: Adam - train loss: 1.37e+02 - valid loss: 1.14e+02, valid ACC: 6.31e-01
8
+ epoch: 8, lr: 1.92e-04, steps: 12160, optimizer: Adam - train loss: 1.10e+02 - valid loss: 88.56, valid ACC: 7.21e-01
9
+ epoch: 9, lr: 2.16e-04, steps: 13680, optimizer: Adam - train loss: 86.93 - valid loss: 67.36, valid ACC: 8.03e-01
10
+ epoch: 10, lr: 2.40e-04, steps: 15200, optimizer: Adam - train loss: 64.60 - valid loss: 51.05, valid ACC: 8.61e-01, valid CER: 22.14
11
+ epoch: 11, lr: 2.64e-04, steps: 16720, optimizer: Adam - train loss: 51.94 - valid loss: 42.96, valid ACC: 8.86e-01
12
+ epoch: 12, lr: 2.88e-04, steps: 18240, optimizer: Adam - train loss: 44.10 - valid loss: 37.56, valid ACC: 8.99e-01
13
+ epoch: 13, lr: 3.12e-04, steps: 19760, optimizer: Adam - train loss: 38.66 - valid loss: 33.39, valid ACC: 9.09e-01
14
+ epoch: 14, lr: 3.36e-04, steps: 21280, optimizer: Adam - train loss: 34.66 - valid loss: 31.04, valid ACC: 9.16e-01
15
+ epoch: 15, lr: 3.60e-04, steps: 22800, optimizer: Adam - train loss: 31.69 - valid loss: 29.01, valid ACC: 9.19e-01
16
+ epoch: 16, lr: 3.85e-04, steps: 24320, optimizer: Adam - train loss: 29.34 - valid loss: 27.39, valid ACC: 9.24e-01
17
+ epoch: 17, lr: 3.89e-04, steps: 25840, optimizer: Adam - train loss: 27.37 - valid loss: 25.86, valid ACC: 9.28e-01
18
+ epoch: 18, lr: 3.78e-04, steps: 27360, optimizer: Adam - train loss: 25.12 - valid loss: 24.43, valid ACC: 9.32e-01
19
+ epoch: 19, lr: 3.68e-04, steps: 28880, optimizer: Adam - train loss: 23.16 - valid loss: 23.26, valid ACC: 9.35e-01
20
+ epoch: 20, lr: 3.58e-04, steps: 30400, optimizer: Adam - train loss: 21.53 - valid loss: 22.58, valid ACC: 9.37e-01, valid CER: 10.20
21
+ epoch: 21, lr: 3.50e-04, steps: 31920, optimizer: Adam - train loss: 20.06 - valid loss: 21.66, valid ACC: 9.39e-01
22
+ epoch: 22, lr: 3.42e-04, steps: 33440, optimizer: Adam - train loss: 18.83 - valid loss: 21.18, valid ACC: 9.41e-01
23
+ epoch: 23, lr: 3.34e-04, steps: 34960, optimizer: Adam - train loss: 17.77 - valid loss: 20.92, valid ACC: 9.42e-01
24
+ epoch: 24, lr: 3.27e-04, steps: 36480, optimizer: Adam - train loss: 16.80 - valid loss: 20.30, valid ACC: 9.43e-01
25
+ epoch: 25, lr: 3.21e-04, steps: 38000, optimizer: Adam - train loss: 15.91 - valid loss: 19.68, valid ACC: 9.44e-01
26
+ epoch: 26, lr: 3.14e-04, steps: 39520, optimizer: Adam - train loss: 15.12 - valid loss: 19.73, valid ACC: 9.45e-01
27
+ epoch: 27, lr: 3.09e-04, steps: 41040, optimizer: Adam - train loss: 14.41 - valid loss: 19.30, valid ACC: 9.46e-01
28
+ epoch: 28, lr: 3.03e-04, steps: 42560, optimizer: Adam - train loss: 13.79 - valid loss: 18.82, valid ACC: 9.47e-01
29
+ epoch: 29, lr: 2.98e-04, steps: 44080, optimizer: Adam - train loss: 13.15 - valid loss: 19.04, valid ACC: 9.47e-01
30
+ epoch: 30, lr: 2.93e-04, steps: 45600, optimizer: Adam - train loss: 12.66 - valid loss: 18.89, valid ACC: 9.47e-01, valid CER: 8.39
31
+ epoch: 31, lr: 2.88e-04, steps: 47120, optimizer: Adam - train loss: 12.13 - valid loss: 18.66, valid ACC: 9.48e-01
32
+ epoch: 32, lr: 2.83e-04, steps: 48640, optimizer: Adam - train loss: 11.64 - valid loss: 18.51, valid ACC: 9.48e-01
33
+ epoch: 33, lr: 2.79e-04, steps: 50160, optimizer: Adam - train loss: 11.21 - valid loss: 18.36, valid ACC: 9.48e-01
34
+ epoch: 34, lr: 2.75e-04, steps: 51680, optimizer: Adam - train loss: 10.79 - valid loss: 18.65, valid ACC: 9.48e-01
35
+ epoch: 35, lr: 2.71e-04, steps: 53200, optimizer: Adam - train loss: 10.43 - valid loss: 18.40, valid ACC: 9.49e-01
36
+ epoch: 36, lr: 2.67e-04, steps: 54720, optimizer: Adam - train loss: 10.11 - valid loss: 18.29, valid ACC: 9.49e-01
37
+ epoch: 37, lr: 2.64e-04, steps: 56240, optimizer: Adam - train loss: 9.78 - valid loss: 18.32, valid ACC: 9.50e-01
38
+ epoch: 38, lr: 2.60e-04, steps: 57760, optimizer: Adam - train loss: 9.46 - valid loss: 18.12, valid ACC: 9.50e-01
39
+ epoch: 39, lr: 2.57e-04, steps: 59280, optimizer: Adam - train loss: 9.17 - valid loss: 18.36, valid ACC: 9.49e-01
40
+ epoch: 40, lr: 2.53e-04, steps: 60800, optimizer: Adam - train loss: 8.88 - valid loss: 18.29, valid ACC: 9.49e-01, valid CER: 7.93
41
+ epoch: 41, lr: 2.50e-05, steps: -1, optimizer: SGD - train loss: 7.01 - valid loss: 17.34, valid ACC: 9.54e-01
42
+ epoch: 42, lr: 2.50e-05, steps: -1, optimizer: SGD - train loss: 6.32 - valid loss: 17.22, valid ACC: 9.54e-01
43
+ epoch: 43, lr: 2.50e-05, steps: -1, optimizer: SGD - train loss: 5.97 - valid loss: 17.15, valid ACC: 9.54e-01
44
+ epoch: 44, lr: 2.50e-05, steps: -1, optimizer: SGD - train loss: 5.76 - valid loss: 17.14, valid ACC: 9.55e-01
45
+ epoch: 45, lr: 2.50e-05, steps: -1, optimizer: SGD - train loss: 5.58 - valid loss: 17.19, valid ACC: 9.54e-01
46
+ epoch: 46, lr: 2.50e-05, steps: -1, optimizer: SGD - train loss: 5.45 - valid loss: 17.07, valid ACC: 9.55e-01
47
+ epoch: 47, lr: 2.50e-05, steps: -1, optimizer: SGD - train loss: 5.33 - valid loss: 17.07, valid ACC: 9.55e-01
48
+ epoch: 48, lr: 2.50e-05, steps: -1, optimizer: SGD - train loss: 5.22 - valid loss: 17.17, valid ACC: 9.55e-01
49
+ epoch: 49, lr: 2.50e-05, steps: -1, optimizer: SGD - train loss: 5.13 - valid loss: 17.24, valid ACC: 9.55e-01
50
+ epoch: 50, lr: 2.50e-05, steps: -1, optimizer: SGD - train loss: 5.04 - valid loss: 17.10, valid ACC: 9.55e-01, valid CER: 7.10
51
+ epoch: 51, lr: 2.50e-05, steps: -1, optimizer: SGD - train loss: 4.97 - valid loss: 17.26, valid ACC: 9.55e-01
52
+ epoch: 52, lr: 2.50e-05, steps: -1, optimizer: SGD - train loss: 4.88 - valid loss: 17.23, valid ACC: 9.55e-01
53
+ epoch: 53, lr: 2.50e-05, steps: -1, optimizer: SGD - train loss: 4.82 - valid loss: 17.17, valid ACC: 9.55e-01
54
+ epoch: 54, lr: 2.50e-05, steps: -1, optimizer: SGD - train loss: 4.76 - valid loss: 17.31, valid ACC: 9.55e-01
55
+ epoch: 55, lr: 2.50e-05, steps: -1, optimizer: SGD - train loss: 4.71 - valid loss: 17.37, valid ACC: 9.55e-01
56
+ epoch: 56, lr: 2.50e-05, steps: -1, optimizer: SGD - train loss: 4.66 - valid loss: 17.26, valid ACC: 9.55e-01
57
+ epoch: 57, lr: 2.50e-05, steps: -1, optimizer: SGD - train loss: 4.60 - valid loss: 17.43, valid ACC: 9.55e-01
58
+ epoch: 58, lr: 2.50e-05, steps: -1, optimizer: SGD - train loss: 4.54 - valid loss: 17.52, valid ACC: 9.55e-01
59
+ epoch: 59, lr: 2.50e-05, steps: -1, optimizer: SGD - train loss: 4.49 - valid loss: 17.42, valid ACC: 9.55e-01
60
+ epoch: 60, lr: 2.50e-05, steps: -1, optimizer: SGD - train loss: 4.45 - valid loss: 17.53, valid ACC: 9.55e-01, valid CER: 7.11
61
+ Epoch loaded: 60 - test loss: 30.17, test ACC: 9.27e-01, test CER: 7.50 - TransformerLM 0.2
62
+ Epoch loaded: 60 - test loss: 15.08, test ACC: 9.27e-01, test CER: 7.74 - RNNLM 0.2
63
+ Epoch loaded: 60 - test loss: 10.06, test ACC: 9.27e-01, test CER: 7.97 - w/o LM
64
+ Epoch loaded: 60 - test loss: 7.54, test ACC: 9.27e-01, test CER: 7.62 - TransformerLM 0.1
65
+ Epoch loaded: 60 - test loss: 6.03, test ACC: 9.27e-01, test CER: 7.54 - TransformerLM 0.3
66
+ Epoch loaded: 60 - test loss: 5.03, test ACC: 9.27e-01, test CER: 7.65 - TransformerLM 0.4
67
+ Epoch loaded: 60 - test loss: 4.31, test ACC: 9.27e-01, test CER: 7.87 - TransformerLM 0.5
68
+ Epoch loaded: 60 - test loss: 3.77, test ACC: 9.27e-01, test CER: 8.13 - TransformerLM 0.6
69
+ Epoch loaded: 60 - test loss: 3.35, test ACC: 9.27e-01, test CER: 8.37 - TransformerLM 0.7
70
+ Epoch loaded: 60 - test loss: 3.02, test ACC: 9.27e-01, test CER: 8.74 - TransformerLM 0.8
ASR-model/tokenizer_seg_bpe5k_char/5000_char.model ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3598620f52b18d0378760db5f5b2443f87ea7e0887303c49a13b936ec66cbb7e
3
+ size 288783
ASR-model/tokenizer_seg_bpe5k_char/5000_char.vocab ADDED
@@ -0,0 +1,4257 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <unk> 0
2
+ <s> 0
3
+ </s> 0
4
+ ▁ -0.988007
5
+ 的 -3.64818
6
+ 是 -4.66353
7
+ 一 -4.71877
8
+ 這 -5.02165
9
+ 有 -5.0436
10
+ 不 -5.0719
11
+ 在 -5.10361
12
+ 個 -5.11818
13
+ 會 -5.35635
14
+ 來 -5.40839
15
+ 我 -5.45934
16
+ 以 -5.47765
17
+ 們 -5.52486
18
+ 人 -5.52624
19
+ 了 -5.57924
20
+ 到 -5.60484
21
+ 要 -5.64711
22
+ 為 -5.66809
23
+ 國 -5.66897
24
+ 就 -5.69856
25
+ 大 -5.71761
26
+ 臺 -5.78127
27
+ 也 -5.79987
28
+ 他 -5.80268
29
+ 年 -5.81117
30
+ 過 -5.86746
31
+ 十 -5.87511
32
+ 中 -5.88534
33
+ 天 -5.90132
34
+ 上 -5.90745
35
+ 公 -5.91384
36
+ 新 -5.92983
37
+ 可 -5.97377
38
+ 民 -6.02273
39
+ 出 -6.03295
40
+ 能 -6.03346
41
+ 生 -6.04135
42
+ 法 -6.04712
43
+ 政 -6.05771
44
+ 那 -6.07501
45
+ 經 -6.09248
46
+ 對 -6.11149
47
+ 多 -6.11851
48
+ 說 -6.16196
49
+ 後 -6.19182
50
+ 學 -6.20966
51
+ 家 -6.22306
52
+ 行 -6.23075
53
+ 成 -6.23928
54
+ 還 -6.25927
55
+ 時 -6.26309
56
+ 部 -6.26869
57
+ 都 -6.27577
58
+ 現 -6.3008
59
+ 今 -6.30895
60
+ 所 -6.33091
61
+ 之 -6.37008
62
+ 地 -6.37097
63
+ 發 -6.38745
64
+ 導 -6.40385
65
+ 報 -6.4079
66
+ 分 -6.40974
67
+ 方 -6.43329
68
+ 視 -6.4367
69
+ 三 -6.43784
70
+ 看 -6.45143
71
+ 因 -6.45181
72
+ 長 -6.45761
73
+ 前 -6.45974
74
+ 立 -6.45994
75
+ 而 -6.46383
76
+ 沒 -6.46559
77
+ 下 -6.46656
78
+ 聞 -6.47461
79
+ 業 -6.47481
80
+ 面 -6.48054
81
+ 開 -6.49332
82
+ 麼 -6.50747
83
+ 得 -6.52806
84
+ 很 -6.54124
85
+ 工 -6.55567
86
+ 外 -6.55674
87
+ 灣 -6.56599
88
+ 然 -6.56858
89
+ 二 -6.57314
90
+ 作 -6.5751
91
+ 用 -6.58056
92
+ 和 -6.58782
93
+ 院 -6.5907
94
+ 最 -6.59402
95
+ 者 -6.60519
96
+ 樣 -6.60699
97
+ 進 -6.60789
98
+ 將 -6.61626
99
+ 當 -6.61922
100
+ 心 -6.62127
101
+ 重 -6.62998
102
+ 高 -6.63044
103
+ 兩 -6.63367
104
+ 於 -6.65139
105
+ 啊 -6.65966
106
+ 動 -6.66322
107
+ 表 -6.66465
108
+ 定 -6.66561
109
+ 去 -6.66991
110
+ 關 -6.67833
111
+ 些 -6.69245
112
+ 百 -6.70403
113
+ 事 -6.7105
114
+ 好 -6.7145
115
+ 實 -6.71726
116
+ 如 -6.72458
117
+ 黨 -6.73067
118
+ 自 -6.73859
119
+ 五 -6.75099
120
+ 委 -6.77067
121
+ 水 -6.77679
122
+ 教 -6.77999
123
+ 全 -6.79455
124
+ 市 -6.79835
125
+ 場 -6.81014
126
+ 同 -6.82011
127
+ 度 -6.82346
128
+ 保 -6.8257
129
+ 本 -6.83866
130
+ 員 -6.84922
131
+ 子 -6.85008
132
+ 點 -6.85756
133
+ 做 -6.87301
134
+ 明 -6.87477
135
+ 果 -6.87506
136
+ 選 -6.87506
137
+ 但 -6.87565
138
+ 小 -6.88304
139
+ 讓 -6.88363
140
+ 已 -6.88631
141
+ 體 -6.8869
142
+ 採 -6.89257
143
+ 月 -6.89407
144
+ 內 -6.89527
145
+ 主 -6.89587
146
+ 比 -6.90009
147
+ 目 -6.9025
148
+ 間 -6.90281
149
+ 北 -6.90857
150
+ 訪 -6.91712
151
+ 期 -6.91805
152
+ 力 -6.92792
153
+ 加 -6.93103
154
+ 提 -6.93571
155
+ 你 -6.93821
156
+ 合 -6.95655
157
+ 四 -6.95783
158
+ 起 -6.95943
159
+ 從 -6.96392
160
+ 機 -6.96908
161
+ 裡 -6.97687
162
+ 應 -6.97817
163
+ 文 -6.9785
164
+ 其 -6.98013
165
+ 只 -6.98013
166
+ 美 -6.98833
167
+ 題 -7.0016
168
+ 意 -7.00864
169
+ 統 -7.0232
170
+ 再 -7.03142
171
+ 位 -7.03314
172
+ 陳 -7.03729
173
+ 府 -7.04493
174
+ 問 -7.05123
175
+ 金 -7.06074
176
+ 種 -7.06678
177
+ 情 -7.0675
178
+ 案 -7.07
179
+ 總 -7.07609
180
+ 呢 -7.07825
181
+ 跟 -7.08476
182
+ 正 -7.08585
183
+ 九 -7.09095
184
+ 示 -7.09865
185
+ 更 -7.10789
186
+ 議 -7.11759
187
+ 候 -7.11947
188
+ 化 -7.12474
189
+ 常 -7.1289
190
+ 六 -7.13194
191
+ 它 -7.13423
192
+ 資 -7.14303
193
+ 回 -7.14766
194
+ 理 -7.15932
195
+ 調 -7.15932
196
+ 區 -7.1601
197
+ 認 -7.16441
198
+ 與 -7.1731
199
+ 電 -7.17429
200
+ 八 -7.17786
201
+ 性 -7.17866
202
+ 受 -7.19635
203
+ 第 -7.1992
204
+ 入 -7.20042
205
+ 齁 -7.20123
206
+ 由 -7.20246
207
+ 話 -7.20368
208
+ 續 -7.20655
209
+ 及 -7.2156
210
+ 原 -7.22725
211
+ 次 -7.23608
212
+ 相 -7.23734
213
+ 眾 -7.24243
214
+ 利 -7.25396
215
+ 望 -7.25957
216
+ 想 -7.27174
217
+ 氣 -7.27788
218
+ 通 -7.28097
219
+ 各 -7.28539
220
+ 希 -7.29118
221
+ 老 -7.30782
222
+ 除 -7.311
223
+ 建 -7.31784
224
+ 把 -7.31922
225
+ 團 -7.31968
226
+ 接 -7.32336
227
+ 解 -7.32983
228
+ 夠 -7.3368
229
+ 展 -7.33821
230
+ 基 -7.34055
231
+ 少 -7.34477
232
+ 著 -7.34524
233
+ 產 -7.34996
234
+ 等 -7.35281
235
+ 什 -7.35613
236
+ 任 -7.35804
237
+ 林 -7.3633
238
+ 育 -7.3773
239
+ 費 -7.3773
240
+ 程 -7.37876
241
+ 七 -7.38022
242
+ 或 -7.38119
243
+ 師 -7.38364
244
+ 元 -7.38461
245
+ 觀 -7.38658
246
+ 整 -7.39396
247
+ 活 -7.39495
248
+ 名 -7.40141
249
+ 才 -7.40141
250
+ 像 -7.4069
251
+ 萬 -7.4084
252
+ 帶 -7.4089
253
+ 代 -7.41142
254
+ 安 -7.41242
255
+ 較 -7.41747
256
+ 被 -7.42
257
+ 先 -7.42356
258
+ 權 -7.42611
259
+ 東 -7.42713
260
+ 計 -7.43174
261
+ 改 -7.43225
262
+ 特 -7.43328
263
+ 嗯 -7.43483
264
+ 務 -7.43637
265
+ 社 -7.4431
266
+ 品 -7.45144
267
+ 該 -7.45301
268
+ 影 -7.45406
269
+ 共 -7.46197
270
+ 身 -7.4625
271
+ 近 -7.471
272
+ 物 -7.47261
273
+ 平 -7.47528
274
+ 辦 -7.48661
275
+ 每 -7.49751
276
+ 道 -7.49806
277
+ 式 -7.51354
278
+ 千 -7.52025
279
+ 考 -7.52081
280
+ 始 -7.52306
281
+ 查 -7.52983
282
+ 管 -7.53665
283
+ 變 -7.53665
284
+ 制 -7.5395
285
+ 給 -7.54064
286
+ 局 -7.54122
287
+ 交 -7.54179
288
+ 卻 -7.54351
289
+ 書 -7.54409
290
+ 決 -7.54523
291
+ 並 -7.54696
292
+ 日 -7.54985
293
+ 您 -7.55274
294
+ 放 -7.5539
295
+ 持 -7.56322
296
+ 界 -7.5685
297
+ 商 -7.57145
298
+ 造 -7.57678
299
+ 喔 -7.57797
300
+ 哪 -7.58992
301
+ 號 -7.59173
302
+ 別 -7.59354
303
+ 科 -7.59959
304
+ 畫 -7.60629
305
+ 數 -7.61058
306
+ 醫 -7.61058
307
+ 海 -7.6155
308
+ 陸 -7.61921
309
+ 呃 -7.62293
310
+ 非 -7.62542
311
+ 術 -7.62792
312
+ 預 -7.63167
313
+ 此 -7.6323
314
+ 助 -7.63293
315
+ 頭 -7.63481
316
+ 治 -7.64112
317
+ 許 -7.64747
318
+ 收 -7.64811
319
+ 達 -7.64811
320
+ 己 -7.65002
321
+ 結 -7.6513
322
+ 張 -7.65194
323
+ 山 -7.65578
324
+ 舉 -7.65578
325
+ 處 -7.65578
326
+ 覺 -7.65836
327
+ 件 -7.65964
328
+ 量 -7.66029
329
+ 無 -7.66287
330
+ 感 -7.66741
331
+ 至 -7.67001
332
+ 節 -7.67721
333
+ 病 -7.67983
334
+ 信 -7.68842
335
+ 強 -7.68842
336
+ 南 -7.7065
337
+ 必 -7.70718
338
+ 車 -7.70785
339
+ 消 -7.70853
340
+ 完 -7.71057
341
+ 演 -7.71328
342
+ 單 -7.71464
343
+ 則 -7.71533
344
+ 路 -7.71533
345
+ 求 -7.71874
346
+ 縣 -7.72148
347
+ 馬 -7.72836
348
+ 向 -7.73251
349
+ 手 -7.73251
350
+ 幾 -7.73599
351
+ 檢 -7.73808
352
+ 樂 -7.74157
353
+ 請 -7.74437
354
+ 司 -7.75425
355
+ 研 -7.76637
356
+ 反 -7.76709
357
+ 布 -7.7714
358
+ 女 -7.78082
359
+ 未 -7.78155
360
+ 連 -7.78739
361
+ 規 -7.78812
362
+ 環 -7.78959
363
+ 傳 -7.7918
364
+ 使 -7.79401
365
+ 繼 -7.79401
366
+ 爭 -7.79474
367
+ 且 -7.79918
368
+ 算 -7.80066
369
+ 推 -7.80662
370
+ 難 -7.80662
371
+ 校 -7.81036
372
+ 友 -7.81186
373
+ 失 -7.81789
374
+ 真 -7.83005
375
+ 光 -7.83081
376
+ 指 -7.83618
377
+ 包 -7.83772
378
+ 勞 -7.84081
379
+ 世 -7.84158
380
+ 專 -7.84235
381
+ 財 -7.84313
382
+ 另 -7.84856
383
+ 濟 -7.85012
384
+ 設 -7.85012
385
+ 又 -7.85247
386
+ 健 -7.85325
387
+ 取 -7.85403
388
+ 流 -7.85481
389
+ 投 -7.85638
390
+ 知 -7.8611
391
+ 條 -7.86982
392
+ 打 -7.88021
393
+ 風 -7.88182
394
+ 空 -7.88909
395
+ 記 -7.88909
396
+ 邊 -7.88909
397
+ 參 -7.89072
398
+ 住 -7.89153
399
+ 何 -7.89234
400
+ 直 -7.89397
401
+ 口 -7.89724
402
+ 況 -7.89724
403
+ 太 -7.89806
404
+ 雖 -7.89806
405
+ 西 -7.90051
406
+ 照 -7.91373
407
+ 告 -7.91539
408
+ 首 -7.9204
409
+ 運 -7.92796
410
+ 際 -7.92965
411
+ 晚 -7.93134
412
+ 率 -7.93303
413
+ 她 -7.93473
414
+ 引 -7.93643
415
+ 農 -7.93813
416
+ 容 -7.94411
417
+ 技 -7.94411
418
+ 聯 -7.94411
419
+ 協 -7.95271
420
+ 啦 -7.95444
421
+ 根 -7.95444
422
+ 項 -7.95444
423
+ 策 -7.95878
424
+ 底 -7.96839
425
+ 走 -7.96839
426
+ 越 -7.97368
427
+ 清 -7.9781
428
+ 藝 -7.97899
429
+ 質 -7.97899
430
+ 價 -7.98254
431
+ 親 -7.98611
432
+ 歡 -7.9879
433
+ 需 -7.99149
434
+ 官 -7.99238
435
+ 施 -7.99328
436
+ 響 -7.99418
437
+ 色 -8.00323
438
+ 見 -8.00505
439
+ 透 -8.01053
440
+ 往 -8.0142
441
+ 據 -8.0142
442
+ 論 -8.02158
443
+ 門 -8.02622
444
+ 組 -8.02809
445
+ 講 -8.03463
446
+ 義 -8.03557
447
+ 軍 -8.03557
448
+ 例 -8.03651
449
+ 園 -8.03745
450
+ 營 -8.03933
451
+ 境 -8.04216
452
+ 戰 -8.04311
453
+ 態 -8.045
454
+ 轉 -8.05453
455
+ 星 -8.0574
456
+ 象 -8.05836
457
+ 須 -8.06029
458
+ 創 -8.06125
459
+ 源 -8.06899
460
+ 障 -8.06899
461
+ 花 -8.07483
462
+ 朋 -8.07874
463
+ 驗 -8.07972
464
+ 防 -8.08267
465
+ 怎 -8.08563
466
+ 億 -8.0876
467
+ 票 -8.08958
468
+ 署 -8.09157
469
+ 確 -8.10557
470
+ 英 -8.10759
471
+ 擔 -8.11467
472
+ 步 -8.11467
473
+ 李 -8.11671
474
+ 職 -8.12284
475
+ 低 -8.13107
476
+ 嚴 -8.13314
477
+ 零 -8.13521
478
+ 修 -8.13729
479
+ 究 -8.1425
480
+ 落 -8.1425
481
+ 息 -8.14564
482
+ 音 -8.15195
483
+ 領 -8.15195
484
+ 孩 -8.153
485
+ 錢 -8.153
486
+ 服 -8.16575
487
+ 客 -8.17543
488
+ 標 -8.17651
489
+ 福 -8.18628
490
+ 察 -8.18738
491
+ 支 -8.18847
492
+ 護 -8.18847
493
+ 雄 -8.18847
494
+ 兒 -8.19837
495
+ 華 -8.20058
496
+ 藥 -8.20168
497
+ 黃 -8.20724
498
+ 療 -8.20948
499
+ 土 -8.22071
500
+ 增 -8.22071
501
+ 限 -8.22297
502
+ 蘭 -8.22411
503
+ 約 -8.22524
504
+ 系 -8.23093
505
+ 岸 -8.23551
506
+ 廠 -8.24126
507
+ 形 -8.24126
508
+ 快 -8.24473
509
+ 熱 -8.24821
510
+ 證 -8.24821
511
+ 景 -8.25286
512
+ 具 -8.25403
513
+ 半 -8.25403
514
+ 供 -8.25989
515
+ 效 -8.2646
516
+ 深 -8.26933
517
+ 興 -8.26933
518
+ 衛 -8.26933
519
+ 士 -8.27528
520
+ 類 -8.27528
521
+ 早 -8.27887
522
+ 準 -8.28007
523
+ 格 -8.28127
524
+ 警 -8.28367
525
+ 顯 -8.28367
526
+ 即 -8.28487
527
+ 訴 -8.28487
528
+ 球 -8.28608
529
+ 隊 -8.28608
530
+ 談 -8.28729
531
+ 滿 -8.29092
532
+ 討 -8.29578
533
+ 訊 -8.297
534
+ 備 -8.30557
535
+ 愛 -8.30803
536
+ 曾 -8.31298
537
+ 習 -8.31546
538
+ 款 -8.31919
539
+ 野 -8.32294
540
+ 負 -8.32545
541
+ 遊 -8.3267
542
+ 剛 -8.32796
543
+ 審 -8.33553
544
+ 料 -8.3368
545
+ 幫 -8.33807
546
+ 嘛 -8.34062
547
+ 配 -8.34062
548
+ 迎 -8.34317
549
+ 補 -8.34445
550
+ 線 -8.34701
551
+ 列 -8.35215
552
+ 假 -8.35344
553
+ 段 -8.35344
554
+ 注 -8.35473
555
+ 竹 -8.35473
556
+ 狀 -8.35732
557
+ 片 -8.35861
558
+ 宣 -8.36121
559
+ 降 -8.36381
560
+ 勢 -8.36512
561
+ 鄉 -8.36773
562
+ 排 -8.37167
563
+ 課 -8.3743
564
+ 減 -8.37694
565
+ 律 -8.37826
566
+ 養 -8.38223
567
+ 居 -8.38356
568
+ 害 -8.38756
569
+ 維 -8.38756
570
+ 識 -8.38756
571
+ 易 -8.3889
572
+ 午 -8.3983
573
+ 試 -8.39965
574
+ 積 -8.40779
575
+ 米 -8.40779
576
+ 免 -8.40915
577
+ 紀 -8.40915
578
+ 型 -8.41052
579
+ 欸 -8.41052
580
+ 針 -8.41052
581
+ 歷 -8.41189
582
+ 謝 -8.416
583
+ 聲 -8.42151
584
+ 獎 -8.42289
585
+ 站 -8.42289
586
+ 歲 -8.42427
587
+ 評 -8.42566
588
+ 找 -8.42983
589
+ 臨 -8.42983
590
+ 族 -8.43542
591
+ 功 -8.43822
592
+ 副 -8.44386
593
+ 括 -8.44386
594
+ 拿 -8.44527
595
+ 集 -8.44527
596
+ 留 -8.4481
597
+ 念 -8.45094
598
+ 油 -8.45094
599
+ 登 -8.45237
600
+ 超 -8.45237
601
+ 止 -8.45522
602
+ 願 -8.45522
603
+ 執 -8.45808
604
+ 群 -8.45951
605
+ 溫 -8.46239
606
+ 份 -8.46383
607
+ 拉 -8.46383
608
+ 網 -8.46383
609
+ 責 -8.46671
610
+ 酒 -8.46671
611
+ 語 -8.46961
612
+ 編 -8.47251
613
+ 依 -8.47688
614
+ 銀 -8.48421
615
+ 劇 -8.48715
616
+ 神 -8.48715
617
+ 測 -8.48863
618
+ 待 -8.49011
619
+ 概 -8.49159
620
+ 般 -8.49753
621
+ 航 -8.50501
622
+ 週 -8.50501
623
+ 細 -8.5171
624
+ 吃 -8.51862
625
+ 製 -8.52014
626
+ 買 -8.52014
627
+ 雨 -8.52472
628
+ 命 -8.52626
629
+ 聽 -8.52626
630
+ 獲 -8.52779
631
+ 差 -8.53087
632
+ 彩 -8.53241
633
+ 媽 -8.5355
634
+ 舞 -8.5355
635
+ 係 -8.5386
636
+ 存 -8.54016
637
+ 離 -8.54171
638
+ 益 -8.54327
639
+ 便 -8.5464
640
+ 媒 -8.54953
641
+ 層 -8.54953
642
+ 言 -8.54953
643
+ 班 -8.5511
644
+ 疑 -8.5511
645
+ 精 -8.5511
646
+ 甚 -8.55583
647
+ 席 -8.55899
648
+ 死 -8.55899
649
+ 斷 -8.56217
650
+ 河 -8.56535
651
+ 升 -8.56695
652
+ 角 -8.56855
653
+ 屬 -8.57015
654
+ 染 -8.57015
655
+ 游 -8.57015
656
+ 斯 -8.57658
657
+ 核 -8.57981
658
+ 革 -8.58305
659
+ 朝 -8.58793
660
+ 史 -8.59448
661
+ 值 -8.59612
662
+ 級 -8.59942
663
+ 停 -8.61104
664
+ 葉 -8.61104
665
+ 腦 -8.62279
666
+ 白 -8.62448
667
+ 印 -8.62618
668
+ 塊 -8.62618
669
+ 抗 -8.62618
670
+ 島 -8.62787
671
+ 館 -8.62957
672
+ 版 -8.63128
673
+ 短 -8.63128
674
+ 蘇 -8.63298
675
+ 輕 -8.64327
676
+ 顧 -8.64673
677
+ 申 -8.64846
678
+ 足 -8.65019
679
+ 險 -8.65019
680
+ 判 -8.65193
681
+ 企 -8.65367
682
+ 擊 -8.65541
683
+ 善 -8.65716
684
+ 付 -8.66066
685
+ 賣 -8.66066
686
+ 啟 -8.66242
687
+ 復 -8.66242
688
+ 王 -8.66242
689
+ 食 -8.66242
690
+ 休 -8.66418
691
+ 極 -8.67125
692
+ 壓 -8.6748
693
+ 央 -8.67658
694
+ 退 -8.68194
695
+ 送 -8.68194
696
+ 困 -8.68373
697
+ 競 -8.68553
698
+ 阿 -8.68553
699
+ 字 -8.68913
700
+ 錯 -8.68913
701
+ 毒 -8.69094
702
+ 監 -8.69456
703
+ 讀 -8.6982
704
+ 隆 -8.70368
705
+ 救 -8.70551
706
+ 廣 -8.70735
707
+ 錄 -8.70735
708
+ 否 -8.70919
709
+ 呼 -8.70919
710
+ 綜 -8.71288
711
+ 卡 -8.71473
712
+ 峰 -8.71473
713
+ 換 -8.71473
714
+ 洲 -8.71658
715
+ 庭 -8.7203
716
+ 遭 -8.72778
717
+ 石 -8.72966
718
+ 稍 -8.72966
719
+ 羅 -8.73531
720
+ 戲 -8.7391
721
+ 眼 -8.741
722
+ 遠 -8.741
723
+ 額 -8.74863
724
+ 優 -8.75055
725
+ 破 -8.75055
726
+ 礙 -8.75632
727
+ 雙 -8.75632
728
+ 郭 -8.76213
729
+ 溪 -8.76602
730
+ 稅 -8.76602
731
+ 青 -8.76993
732
+ 構 -8.77189
733
+ 榮 -8.77385
734
+ 隨 -8.77385
735
+ 患 -8.77582
736
+ 批 -8.77779
737
+ 童 -8.77779
738
+ 亞 -8.77976
739
+ 里 -8.77976
740
+ 器 -8.78174
741
+ 承 -8.78372
742
+ 銷 -8.78372
743
+ 拜 -8.78571
744
+ 介 -8.7877
745
+ 禮 -8.78969
746
+ 港 -8.79169
747
+ 火 -8.79169
748
+ 喜 -8.79369
749
+ 庫 -8.79369
750
+ 絕 -8.79369
751
+ 刑 -8.79771
752
+ 吸 -8.79771
753
+ 志 -8.79771
754
+ 初 -8.80376
755
+ 缺 -8.80579
756
+ 德 -8.80782
757
+ 波 -8.80782
758
+ 切 -8.82215
759
+ 古 -8.82215
760
+ 宗 -8.82215
761
+ 適 -8.82215
762
+ 災 -8.82628
763
+ 估 -8.83251
764
+ 速 -8.8346
765
+ 慧 -8.83669
766
+ 終 -8.83669
767
+ 克 -8.8472
768
+ 旅 -8.8472
769
+ 扁 -8.84932
770
+ 戶 -8.85144
771
+ 謂 -8.85144
772
+ 宜 -8.8557
773
+ 傷 -8.86211
774
+ 座 -8.86426
775
+ 籍 -8.86642
776
+ 劃 -8.87074
777
+ 召 -8.8729
778
+ 範 -8.8729
779
+ 恐 -8.87507
780
+ 飛 -8.87943
781
+ 康 -8.88162
782
+ 獨 -8.88381
783
+ 令 -8.89041
784
+ 富 -8.89041
785
+ 血 -8.89041
786
+ 密 -8.89928
787
+ 裝 -8.90823
788
+ 織 -8.91047
789
+ 致 -8.91047
790
+ 乎 -8.91499
791
+ 村 -8.91953
792
+ 充 -8.9218
793
+ 掉 -8.92408
794
+ 暫 -8.92408
795
+ 慢 -8.93095
796
+ 置 -8.93326
797
+ 房 -8.93787
798
+ 互 -8.94019
799
+ 擇 -8.94019
800
+ 昨 -8.94019
801
+ 故 -8.94484
802
+ 母 -8.94484
803
+ 刻 -8.94952
804
+ 父 -8.94952
805
+ 廢 -8.95421
806
+ 延 -8.95421
807
+ 歌 -8.95421
808
+ 俊 -8.95657
809
+ 汙 -8.95657
810
+ 釋 -8.95657
811
+ 材 -8.95893
812
+ 症 -8.96367
813
+ 街 -8.96367
814
+ 緊 -8.96605
815
+ 黑 -8.96605
816
+ 練 -8.96843
817
+ 洪 -8.97321
818
+ 珍 -8.97321
819
+ 融 -8.98285
820
+ 素 -8.98528
821
+ 思 -8.98771
822
+ 派 -8.99014
823
+ 避 -8.99014
824
+ 危 -8.99749
825
+ 股 -8.99995
826
+ 似 -9.00488
827
+ 移 -9.00488
828
+ 楚 -9.00736
829
+ 船 -9.00736
830
+ 雲 -9.00736
831
+ 努 -9.00985
832
+ 店 -9.00985
833
+ 祖 -9.01234
834
+ 模 -9.01484
835
+ 控 -9.01734
836
+ 突 -9.01734
837
+ 漲 -9.01985
838
+ 域 -9.02995
839
+ 江 -9.02995
840
+ 煌 -9.03249
841
+ 購 -9.03249
842
+ 冷 -9.03504
843
+ 婦 -9.03504
844
+ 賽 -9.03504
845
+ 嘉 -9.03759
846
+ 巴 -9.03759
847
+ 寫 -9.04272
848
+ 蕙 -9.04788
849
+ 久 -9.05046
850
+ 籲 -9.05306
851
+ 醒 -9.05306
852
+ 仍 -9.06088
853
+ 博 -9.06088
854
+ 吳 -9.0635
855
+ 圖 -9.0635
856
+ 訓 -9.0635
857
+ 播 -9.06613
858
+ 順 -9.06613
859
+ 劉 -9.06876
860
+ 盟 -9.07405
861
+ 牠 -9.07671
862
+ 紅 -9.07671
863
+ 娟 -9.07937
864
+ 券 -9.08472
865
+ 簡 -9.08741
866
+ 玉 -9.0901
867
+ 草 -9.0901
868
+ 守 -9.0955
869
+ 慶 -9.0955
870
+ 叫 -9.09822
871
+ 急 -9.09822
872
+ 盡 -9.1064
873
+ 違 -9.1064
874
+ 逐 -9.1119
875
+ 竟 -9.11466
876
+ 譯 -9.11466
877
+ 昆 -9.11742
878
+ 靜 -9.11742
879
+ 男 -9.1202
880
+ 訂 -9.1202
881
+ 鐵 -9.1202
882
+ 銘 -9.12298
883
+ 閣 -9.12577
884
+ 夫 -9.13137
885
+ 罪 -9.13137
886
+ 貨 -9.13137
887
+ 爾 -9.137
888
+ 錫 -9.13983
889
+ 湖 -9.14267
890
+ 衝 -9.14267
891
+ 怕 -9.14551
892
+ 套 -9.14837
893
+ 怡 -9.15123
894
+ 輔 -9.15123
895
+ 犯 -9.1541
896
+ 漸 -9.15697
897
+ 貸 -9.16276
898
+ 慮 -9.16566
899
+ 城 -9.16857
900
+ 嗎 -9.17442
901
+ 龍 -9.17735
902
+ 智 -9.1803
903
+ 室 -9.18325
904
+ 擴 -9.18919
905
+ 玩 -9.18919
906
+ 稱 -9.18919
907
+ 屆 -9.19516
908
+ 省 -9.19516
909
+ 烈 -9.19816
910
+ 罰 -9.19816
911
+ 香 -9.19816
912
+ 藉 -9.20117
913
+ 魚 -9.20721
914
+ 幅 -9.2194
915
+ 永 -9.2194
916
+ 措 -9.22248
917
+ 木 -9.22556
918
+ 紹 -9.22556
919
+ 複 -9.22556
920
+ 漁 -9.22865
921
+ 貿 -9.22865
922
+ 階 -9.22865
923
+ 奇 -9.23175
924
+ 微 -9.23486
925
+ 苦 -9.23486
926
+ 附 -9.23486
927
+ 右 -9.24111
928
+ 威 -9.24111
929
+ 爆 -9.24425
930
+ 左 -9.2474
931
+ 彈 -9.2474
932
+ 趕 -9.2474
933
+ 陽 -9.2474
934
+ 鼓 -9.2474
935
+ 夜 -9.25056
936
+ 追 -9.25056
937
+ 靠 -9.25056
938
+ 圍 -9.25691
939
+ 韓 -9.25691
940
+ 畢 -9.26651
941
+ 背 -9.26651
942
+ 尊 -9.26973
943
+ 幣 -9.26973
944
+ 歐 -9.26973
945
+ 紛 -9.26973
946
+ 賴 -9.26973
947
+ 典 -9.27296
948
+ 納 -9.27296
949
+ 哈 -9.2762
950
+ 隻 -9.27945
951
+ 餐 -9.27945
952
+ 勵 -9.28271
953
+ 堂 -9.28271
954
+ 拍 -9.28271
955
+ 揚 -9.28271
956
+ 季 -9.29257
957
+ 損 -9.29257
958
+ 良 -9.29257
959
+ 輪 -9.29257
960
+ 徐 -9.30586
961
+ 樓 -9.30586
962
+ 舊 -9.31257
963
+ 鏡 -9.31257
964
+ 植 -9.31594
965
+ 鎮 -9.31594
966
+ 筆 -9.31932
967
+ 簽 -9.31932
968
+ 豐 -9.31932
969
+ 倒 -9.32272
970
+ 蓮 -9.32272
971
+ 蠻 -9.32272
972
+ 春 -9.32613
973
+ 棄 -9.32613
974
+ 替 -9.33298
975
+ 秀 -9.33298
976
+ 誤 -9.33298
977
+ 普 -9.33642
978
+ 綠 -9.33987
979
+ 跑 -9.33987
980
+ 亡 -9.34334
981
+ 周 -9.34334
982
+ 答 -9.34334
983
+ 探 -9.34682
984
+ 貼 -9.34682
985
+ 京 -9.35031
986
+ 焦 -9.35031
987
+ 趣 -9.35031
988
+ 曉 -9.35381
989
+ 儘 -9.35733
990
+ 溝 -9.35733
991
+ 癌 -9.35733
992
+ 私 -9.35733
993
+ 腳 -9.35733
994
+ 柏 -9.36085
995
+ 欣 -9.36085
996
+ 痛 -9.36085
997
+ 圓 -9.36439
998
+ 樹 -9.36439
999
+ 糖 -9.36439
1000
+ 僱 -9.37151
1001
+ 售 -9.37509
1002
+ 幕 -9.37509
1003
+ 貴 -9.38228
1004
+ 寶 -9.3859
1005
+ 潮 -9.3859
1006
+ 激 -9.3859
1007
+ 董 -9.3859
1008
+ 均 -9.38953
1009
+ 桃 -9.38953
1010
+ 束 -9.39317
1011
+ 弱 -9.39683
1012
+ 涉 -9.39683
1013
+ 陣 -9.39683
1014
+ 尼 -9.4005
1015
+ 穩 -9.4005
1016
+ 禁 -9.40418
1017
+ 芳 -9.40788
1018
+ 遺 -9.41531
1019
+ 堅 -9.41905
1020
+ 邀 -9.41905
1021
+ 亮 -9.4228
1022
+ 攻 -9.42657
1023
+ 諾 -9.42657
1024
+ 余 -9.43035
1025
+ 唱 -9.43415
1026
+ 序 -9.44178
1027
+ 懷 -9.44178
1028
+ 餘 -9.44178
1029
+ 射 -9.44562
1030
+ 藏 -9.44562
1031
+ 償 -9.44947
1032
+ 雜 -9.45334
1033
+ 挑 -9.45722
1034
+ 曲 -9.45722
1035
+ 暴 -9.46112
1036
+ 碼 -9.46112
1037
+ 聰 -9.46112
1038
+ 尋 -9.46897
1039
+ 泰 -9.46897
1040
+ 符 -9.46897
1041
+ 賞 -9.46897
1042
+ 享 -9.47291
1043
+ 偏 -9.47687
1044
+ 忙 -9.48085
1045
+ 奈 -9.48484
1046
+ 昌 -9.48484
1047
+ 疾 -9.48484
1048
+ 築 -9.48885
1049
+ 味 -9.49287
1050
+ 徵 -9.49287
1051
+ 緩 -9.49287
1052
+ 洋 -9.49691
1053
+ 疫 -9.49691
1054
+ 皮 -9.49691
1055
+ 援 -9.50504
1056
+ 浪 -9.50913
1057
+ 宏 -9.51324
1058
+ 遇 -9.51324
1059
+ 呈 -9.51736
1060
+ 憲 -9.51736
1061
+ 秘 -9.51736
1062
+ 薪 -9.51736
1063
+ 培 -9.5215
1064
+ 索 -9.5215
1065
+ 軟 -9.5215
1066
+ 幹 -9.52566
1067
+ 攝 -9.52566
1068
+ 爸 -9.52566
1069
+ 績 -9.52984
1070
+ 肯 -9.52984
1071
+ 壞 -9.53403
1072
+ 掌 -9.53403
1073
+ 揮 -9.53403
1074
+ 抽 -9.53824
1075
+ 耀 -9.53824
1076
+ 賠 -9.53824
1077
+ 按 -9.54247
1078
+ 武 -9.54247
1079
+ 診 -9.54247
1080
+ 麗 -9.55098
1081
+ 仁 -9.55526
1082
+ 尤 -9.55526
1083
+ 屋 -9.55526
1084
+ 捷 -9.55526
1085
+ 燈 -9.55526
1086
+ 誰 -9.55526
1087
+ 玲 -9.55956
1088
+ 盛 -9.56388
1089
+ 藍 -9.56388
1090
+ 繳 -9.56822
1091
+ 尺 -9.57258
1092
+ 惠 -9.57258
1093
+ 捐 -9.57258
1094
+ 裁 -9.57695
1095
+ 擁 -9.58576
1096
+ 籌 -9.58576
1097
+ 胞 -9.58576
1098
+ 晶 -9.5902
1099
+ 異 -9.5902
1100
+ 振 -9.59913
1101
+ 齡 -9.59913
1102
+ 慈 -9.60362
1103
+ 彭 -9.60814
1104
+ 架 -9.60814
1105
+ 澎 -9.60814
1106
+ 蓋 -9.60814
1107
+ 距 -9.60814
1108
+ 麟 -9.61267
1109
+ 川 -9.61723
1110
+ 末 -9.61723
1111
+ 聚 -9.61723
1112
+ 賄 -9.62181
1113
+ 鄭 -9.62181
1114
+ 屏 -9.6264
1115
+ 牌 -9.6264
1116
+ 詳 -9.6264
1117
+ 宮 -9.63102
1118
+ 觸 -9.63102
1119
+ 震 -9.63102
1120
+ 乾 -9.63566
1121
+ 勒 -9.63566
1122
+ 殘 -9.63566
1123
+ 邱 -9.64032
1124
+ 固 -9.64501
1125
+ 雅 -9.64501
1126
+ 夏 -9.65444
1127
+ 雕 -9.65919
1128
+ 露 -9.65919
1129
+ 伊 -9.66397
1130
+ 諭 -9.66397
1131
+ 鐘 -9.66397
1132
+ 唯 -9.67842
1133
+ 促 -9.68329
1134
+ 戒 -9.68329
1135
+ 蔡 -9.68329
1136
+ 坐 -9.68818
1137
+ 婚 -9.68818
1138
+ 授 -9.68818
1139
+ 略 -9.68818
1140
+ 紙 -9.68818
1141
+ 覽 -9.68818
1142
+ 辛 -9.68818
1143
+ 汝 -9.69309
1144
+ 颱 -9.69309
1145
+ 靈 -9.69803
1146
+ 佳 -9.703
1147
+ 搶 -9.713
1148
+ 洗 -9.713
1149
+ 炸 -9.713
1150
+ 穿 -9.713
1151
+ 雪 -9.713
1152
+ 招 -9.71803
1153
+ 殺 -9.71803
1154
+ 偵 -9.7231
1155
+ 掃 -9.7231
1156
+ 板 -9.7231
1157
+ 滋 -9.7231
1158
+ 僅 -9.72819
1159
+ 握 -9.72819
1160
+ 祝 -9.72819
1161
+ 章 -9.72819
1162
+ 累 -9.72819
1163
+ 覆 -9.72819
1164
+ 詢 -9.72819
1165
+ 恆 -9.7333
1166
+ 津 -9.73844
1167
+ 輸 -9.73844
1168
+ 旦 -9.74361
1169
+ 森 -9.74361
1170
+ 橋 -9.74361
1171
+ 縮 -9.7488
1172
+ 迫 -9.7488
1173
+ 鬆 -9.7488
1174
+ 句 -9.75403
1175
+ 塑 -9.75403
1176
+ 沙 -9.75403
1177
+ 跨 -9.75403
1178
+ 迷 -9.75403
1179
+ 吧 -9.75928
1180
+ 頻 -9.75928
1181
+ 侵 -9.76455
1182
+ 瑤 -9.76455
1183
+ 亂 -9.76986
1184
+ 坤 -9.76986
1185
+ 敢 -9.76986
1186
+ 殊 -9.76986
1187
+ 泉 -9.76986
1188
+ 怖 -9.77519
1189
+ 漫 -9.78055
1190
+ 鍵 -9.78055
1191
+ 哥 -9.78594
1192
+ 飲 -9.78594
1193
+ 坦 -9.79136
1194
+ 某 -9.79681
1195
+ 炎 -9.79681
1196
+ 散 -9.80229
1197
+ 途 -9.80229
1198
+ 勝 -9.8078
1199
+ 托 -9.8078
1200
+ 擾 -9.8078
1201
+ 菸 -9.8078
1202
+ 鬥 -9.8078
1203
+ 折 -9.81334
1204
+ 搭 -9.81334
1205
+ 盲 -9.81334
1206
+ 歸 -9.82452
1207
+ 純 -9.82452
1208
+ 肉 -9.82452
1209
+ 遷 -9.83015
1210
+ 雷 -9.83015
1211
+ 剩 -9.83582
1212
+ 獻 -9.83582
1213
+ 佔 -9.84151
1214
+ 債 -9.84151
1215
+ 煙 -9.84151
1216
+ 蹈 -9.84151
1217
+ 恢 -9.84724
1218
+ 既 -9.85301
1219
+ 熟 -9.85301
1220
+ 袁 -9.85301
1221
+ 圾 -9.85881
1222
+ 垃 -9.85881
1223
+ 慣 -9.85881
1224
+ 蔣 -9.85881
1225
+ 蟲 -9.85881
1226
+ 譬 -9.85881
1227
+ 趨 -9.85881
1228
+ 寬 -9.86464
1229
+ 泛 -9.86464
1230
+ 菜 -9.8705
1231
+ 訟 -9.8705
1232
+ 遍 -9.8705
1233
+ 扣 -9.8764
1234
+ 抵 -9.8764
1235
+ 澤 -9.8764
1236
+ 盤 -9.8764
1237
+ 龔 -9.8764
1238
+ 隔 -9.88234
1239
+ 榜 -9.88831
1240
+ 瑞 -9.88831
1241
+ 含 -9.89431
1242
+ 奏 -9.89431
1243
+ 擬 -9.89431
1244
+ 田 -9.89431
1245
+ 乏 -9.90035
1246
+ 倍 -9.90035
1247
+ 聖 -9.90035
1248
+ 跳 -9.90035
1249
+ 州 -9.90643
1250
+ 淡 -9.90643
1251
+ 飯 -9.90643
1252
+ 串 -9.91255
1253
+ 寒 -9.91255
1254
+ 幸 -9.91255
1255
+ 幼 -9.91255
1256
+ 惡 -9.91255
1257
+ 潛 -9.91255
1258
+ 析 -9.9187
1259
+ 狗 -9.9187
1260
+ 硬 -9.9187
1261
+ 端 -9.9187
1262
+ 苗 -9.9187
1263
+ 徒 -9.9249
1264
+ 託 -9.9249
1265
+ 貫 -9.9249
1266
+ 擺 -9.93113
1267
+ 閒 -9.93113
1268
+ 宋 -9.9374
1269
+ 脫 -9.9374
1270
+ 賓 -9.9374
1271
+ 儀 -9.9437
1272
+ 珊 -9.9437
1273
+ 督 -9.9437
1274
+ 肥 -9.9437
1275
+ 憂 -9.95005
1276
+ 賺 -9.95005
1277
+ 予 -9.95644
1278
+ 巡 -9.95644
1279
+ 昇 -9.95644
1280
+ 罹 -9.96288
1281
+ 駐 -9.96288
1282
+ 敗 -9.96935
1283
+ 顏 -9.96935
1284
+ 併 -9.97586
1285
+ 冬 -9.97586
1286
+ 誠 -9.97586
1287
+ 閱 -9.97586
1288
+ 拆 -9.98242
1289
+ 燒 -9.98242
1290
+ 茶 -9.98242
1291
+ 袋 -9.98242
1292
+ 骨 -9.98242
1293
+ 麻 -9.98242
1294
+ 旁 -9.98902
1295
+ 獄 -9.98902
1296
+ 呂 -9.99566
1297
+ 哲 -9.99566
1298
+ 描 -9.99566
1299
+ 操 -10.0024
1300
+ 敏 -10.0024
1301
+ 朗 -10.0024
1302
+ 牙 -10.0024
1303
+ 床 -10.0091
1304
+ 君 -10.0159
1305
+ 呀 -10.0159
1306
+ 憶 -10.0159
1307
+ 衣 -10.0159
1308
+ 誌 -10.0227
1309
+ 隱 -10.0227
1310
+ 勇 -10.0296
1311
+ 嫌 -10.0296
1312
+ 拒 -10.0296
1313
+ 汽 -10.0296
1314
+ 偷 -10.0365
1315
+ 誕 -10.0365
1316
+ 輝 -10.0365
1317
+ 鮮 -10.0365
1318
+ 淨 -10.0435
1319
+ 疏 -10.0435
1320
+ 菁 -10.0435
1321
+ 阻 -10.0435
1322
+ 匯 -10.0505
1323
+ 廷 -10.0505
1324
+ 欠 -10.0505
1325
+ 瓶 -10.0505
1326
+ 衡 -10.0505
1327
+ 倫 -10.0575
1328
+ 封 -10.0575
1329
+ 翻 -10.0575
1330
+ 耶 -10.0575
1331
+ 默 -10.0575
1332
+ 丁 -10.0647
1333
+ 契 -10.0647
1334
+ 帳 -10.0647
1335
+ 楊 -10.0647
1336
+ 盜 -10.0647
1337
+ 繁 -10.0647
1338
+ 胡 -10.0647
1339
+ 塞 -10.0718
1340
+ 忽 -10.0718
1341
+ 貝 -10.0718
1342
+ 夢 -10.079
1343
+ 搜 -10.079
1344
+ 梅 -10.079
1345
+ 沈 -10.079
1346
+ 辭 -10.0863
1347
+ 壇 -10.0936
1348
+ 摩 -10.0936
1349
+ 晴 -10.0936
1350
+ 汗 -10.0936
1351
+ 贊 -10.0936
1352
+ 邦 -10.0936
1353
+ 驚 -10.0936
1354
+ 廳 -10.101
1355
+ 抓 -10.101
1356
+ 捕 -10.101
1357
+ 祭 -10.101
1358
+ 忘 -10.116
1359
+ 笑 -10.116
1360
+ 蛋 -10.116
1361
+ 逃 -10.116
1362
+ 刺 -10.1235
1363
+ 喝 -10.1235
1364
+ 扮 -10.1235
1365
+ 池 -10.1235
1366
+ 烏 -10.1235
1367
+ 牛 -10.1235
1368
+ 稻 -10.1235
1369
+ 贈 -10.1235
1370
+ 蹤 -10.1235
1371
+ 吋 -10.1311
1372
+ 暑 -10.1311
1373
+ 駕 -10.1311
1374
+ 俄 -10.1388
1375
+ 兼 -10.1388
1376
+ 爐 -10.1388
1377
+ 抱 -10.1465
1378
+ 棒 -10.1465
1379
+ 鬧 -10.1465
1380
+ 擠 -10.1543
1381
+ 忠 -10.1621
1382
+ 撤 -10.1621
1383
+ 濃 -10.1621
1384
+ 碰 -10.1621
1385
+ 辯 -10.1621
1386
+ 兆 -10.17
1387
+ 搖 -10.17
1388
+ 沿 -10.17
1389
+ 瑚 -10.17
1390
+ 裂 -10.17
1391
+ 膠 -10.178
1392
+ 菌 -10.178
1393
+ 遲 -10.178
1394
+ 乘 -10.186
1395
+ 劑 -10.186
1396
+ 晨 -10.186
1397
+ 浮 -10.186
1398
+ 礎 -10.186
1399
+ 毅 -10.1941
1400
+ 混 -10.1941
1401
+ 莉 -10.1941
1402
+ 鈔 -10.1941
1403
+ 濫 -10.2023
1404
+ 兵 -10.2105
1405
+ 懲 -10.2105
1406
+ 朱 -10.2105
1407
+ 瑪 -10.2105
1408
+ 莊 -10.2105
1409
+ 陪 -10.2105
1410
+ 鳥 -10.2105
1411
+ 嬰 -10.2188
1412
+ 鋒 -10.2188
1413
+ 鎖 -10.2188
1414
+ 頓 -10.2188
1415
+ 冒 -10.2272
1416
+ 脅 -10.2272
1417
+ 顆 -10.2272
1418
+ 伯 -10.2356
1419
+ 怪 -10.2356
1420
+ 脈 -10.2356
1421
+ 蹟 -10.2356
1422
+ 吉 -10.2441
1423
+ 填 -10.2441
1424
+ 映 -10.2441
1425
+ 貌 -10.2441
1426
+ 販 -10.2441
1427
+ 魯 -10.2441
1428
+ 厚 -10.2527
1429
+ 嘿 -10.2527
1430
+ 巨 -10.2527
1431
+ 撥 -10.2527
1432
+ 牽 -10.2527
1433
+ 租 -10.2527
1434
+ 艦 -10.2527
1435
+ 輩 -10.2527
1436
+ 佈 -10.2614
1437
+ 嶼 -10.2614
1438
+ 毛 -10.2614
1439
+ 琴 -10.2614
1440
+ 蚊 -10.2614
1441
+ 鋼 -10.2614
1442
+ 盈 -10.2701
1443
+ 聘 -10.2701
1444
+ 臉 -10.2701
1445
+ 返 -10.2701
1446
+ 坡 -10.2789
1447
+ 堆 -10.2789
1448
+ 奶 -10.2789
1449
+ 娘 -10.2789
1450
+ 役 -10.2789
1451
+ 荷 -10.2789
1452
+ 袖 -10.2789
1453
+ 載 -10.2789
1454
+ 宿 -10.2878
1455
+ 巧 -10.2878
1456
+ 拼 -10.2878
1457
+ 旗 -10.2878
1458
+ 洞 -10.2878
1459
+ 胎 -10.2878
1460
+ 臟 -10.2878
1461
+ 錦 -10.2878
1462
+ 姓 -10.2968
1463
+ 懂 -10.2968
1464
+ 掛 -10.2968
1465
+ 氛 -10.2968
1466
+ 澳 -10.2968
1467
+ 瞭 -10.2968
1468
+ 紐 -10.3058
1469
+ 腸 -10.3058
1470
+ 膽 -10.3058
1471
+ 贏 -10.3058
1472
+ 禎 -10.315
1473
+ 糾 -10.315
1474
+ 肝 -10.315
1475
+ 虧 -10.315
1476
+ 詞 -10.315
1477
+ 豪 -10.315
1478
+ 渡 -10.3242
1479
+ 踏 -10.3242
1480
+ 佛 -10.3335
1481
+ 占 -10.3335
1482
+ 弊 -10.3335
1483
+ 跡 -10.3335
1484
+ 攤 -10.3429
1485
+ 漢 -10.3429
1486
+ 輻 -10.3429
1487
+ 刪 -10.3523
1488
+ 敬 -10.3523
1489
+ 湯 -10.3523
1490
+ 澄 -10.3523
1491
+ 秋 -10.3523
1492
+ 耕 -10.3523
1493
+ 鄰 -10.3523
1494
+ 塔 -10.3619
1495
+ 漏 -10.3619
1496
+ 粉 -10.3619
1497
+ 伴 -10.3716
1498
+ 冰 -10.3716
1499
+ 姐 -10.3716
1500
+ 戴 -10.3716
1501
+ 斑 -10.3716
1502
+ 耳 -10.3716
1503
+ 跌 -10.3716
1504
+ 龐 -10.3716
1505
+ 弟 -10.3813
1506
+ 徹 -10.3813
1507
+ 殖 -10.3813
1508
+ 洛 -10.3813
1509
+ 窮 -10.3813
1510
+ 賀 -10.3813
1511
+ 偶 -10.3912
1512
+ 募 -10.3912
1513
+ 斤 -10.3912
1514
+ 松 -10.3912
1515
+ 氏 -10.3912
1516
+ 頒 -10.3912
1517
+ 咖 -10.4011
1518
+ 滑 -10.4011
1519
+ 甲 -10.4011
1520
+ 鯨 -10.4011
1521
+ 鳳 -10.4011
1522
+ 歉 -10.4112
1523
+ 泥 -10.4112
1524
+ 瑜 -10.4112
1525
+ 閉 -10.4112
1526
+ 齣 -10.4112
1527
+ 准 -10.4213
1528
+ 喊 -10.4213
1529
+ 夕 -10.4213
1530
+ 暖 -10.4213
1531
+ 詩 -10.4213
1532
+ 乳 -10.4316
1533
+ 噴 -10.4316
1534
+ 壽 -10.4316
1535
+ 彼 -10.4316
1536
+ 槍 -10.4316
1537
+ 謀 -10.4316
1538
+ 郵 -10.4316
1539
+ 雞 -10.4316
1540
+ 瓦 -10.442
1541
+ 述 -10.442
1542
+ 鑑 -10.442
1543
+ 頂 -10.442
1544
+ 仔 -10.463
1545
+ 孟 -10.463
1546
+ 桌 -10.463
1547
+ 漂 -10.463
1548
+ 谷 -10.463
1549
+ 啡 -10.4737
1550
+ 墾 -10.4737
1551
+ 宅 -10.4737
1552
+ 拖 -10.4737
1553
+ 歧 -10.4737
1554
+ 絲 -10.4737
1555
+ 蒙 -10.4737
1556
+ 軌 -10.4737
1557
+ 迴 -10.4737
1558
+ 頃 -10.4737
1559
+ 干 -10.4845
1560
+ 搬 -10.4845
1561
+ 栗 -10.4845
1562
+ 豬 -10.4845
1563
+ 駛 -10.4845
1564
+ 齊 -10.4845
1565
+ 俗 -10.4954
1566
+ 忍 -10.4954
1567
+ 押 -10.4954
1568
+ 髮 -10.4954
1569
+ 梁 -10.5065
1570
+ 涵 -10.5065
1571
+ 睛 -10.5065
1572
+ 礁 -10.5065
1573
+ 荒 -10.5065
1574
+ 借 -10.5177
1575
+ 凍 -10.5177
1576
+ 慎 -10.5177
1577
+ 截 -10.5177
1578
+ 揭 -10.5177
1579
+ 旱 -10.5177
1580
+ 瓜 -10.5177
1581
+ 肢 -10.5177
1582
+ 蝶 -10.5177
1583
+ 圈 -10.529
1584
+ 廚 -10.529
1585
+ 泡 -10.529
1586
+ 滅 -10.529
1587
+ 若 -10.529
1588
+ 鼠 -10.529
1589
+ 丟 -10.5404
1590
+ 妹 -10.5404
1591
+ 寧 -10.5404
1592
+ 潔 -10.5404
1593
+ 繪 -10.5404
1594
+ 菲 -10.5404
1595
+ 陷 -10.5404
1596
+ 添 -10.552
1597
+ 罷 -10.552
1598
+ 遣 -10.552
1599
+ 仟 -10.5636
1600
+ 尿 -10.5636
1601
+ 炮 -10.5636
1602
+ 翠 -10.5636
1603
+ 膚 -10.5636
1604
+ 襲 -10.5636
1605
+ 貧 -10.5636
1606
+ 墨 -10.5755
1607
+ 奉 -10.5755
1608
+ 椅 -10.5755
1609
+ 榔 -10.5755
1610
+ 櫃 -10.5755
1611
+ 燃 -10.5755
1612
+ 牆 -10.5755
1613
+ 牧 -10.5755
1614
+ 絡 -10.5755
1615
+ 妨 -10.5875
1616
+ 孫 -10.5875
1617
+ 彰 -10.5875
1618
+ 扶 -10.5875
1619
+ 挖 -10.5875
1620
+ 曆 -10.5875
1621
+ 淹 -10.5875
1622
+ 緒 -10.5875
1623
+ 輛 -10.5875
1624
+ 闆 -10.5875
1625
+ 噸 -10.5996
1626
+ 毀 -10.5996
1627
+ 耐 -10.5996
1628
+ 伐 -10.6119
1629
+ 搞 -10.6119
1630
+ 檳 -10.6119
1631
+ 液 -10.6119
1632
+ 綁 -10.6119
1633
+ 龜 -10.6119
1634
+ 刷 -10.6243
1635
+ 珠 -10.6243
1636
+ 甘 -10.6243
1637
+ 莫 -10.6243
1638
+ 伸 -10.6369
1639
+ 勁 -10.6369
1640
+ 喪 -10.6369
1641
+ 妻 -10.6369
1642
+ 循 -10.6369
1643
+ 惜 -10.6369
1644
+ 賭 -10.6369
1645
+ 儲 -10.6496
1646
+ 窗 -10.6496
1647
+ 肌 -10.6496
1648
+ 虛 -10.6496
1649
+ 鍾 -10.6496
1650
+ 餅 -10.6496
1651
+ 傾 -10.6625
1652
+ 坑 -10.6625
1653
+ 廟 -10.6625
1654
+ 弄 -10.6625
1655
+ 梓 -10.6625
1656
+ 溼 -10.6625
1657
+ 甦 -10.6625
1658
+ 陰 -10.6625
1659
+ 霧 -10.6625
1660
+ 鴨 -10.6625
1661
+ 唸 -10.6756
1662
+ 悉 -10.6756
1663
+ 撈 -10.6756
1664
+ 暗 -10.6756
1665
+ 棟 -10.6756
1666
+ 淺 -10.6756
1667
+ 燕 -10.6756
1668
+ 紋 -10.6756
1669
+ 舒 -10.6756
1670
+ 刀 -10.6888
1671
+ 堤 -10.6888
1672
+ 尖 -10.6888
1673
+ 悲 -10.6888
1674
+ 憑 -10.6888
1675
+ 摸 -10.6888
1676
+ 洩 -10.6888
1677
+ 胖 -10.6888
1678
+ 兌 -10.7022
1679
+ 暢 -10.7022
1680
+ 檔 -10.7022
1681
+ 犬 -10.7022
1682
+ 箱 -10.7022
1683
+ 遵 -10.7022
1684
+ 仰 -10.7158
1685
+ 婆 -10.7158
1686
+ 沖 -10.7158
1687
+ 卸 -10.7296
1688
+ 寄 -10.7296
1689
+ 徽 -10.7296
1690
+ 敵 -10.7296
1691
+ 牲 -10.7296
1692
+ 菊 -10.7296
1693
+ 黎 -10.7296
1694
+ 凌 -10.7436
1695
+ 奮 -10.7436
1696
+ 泳 -10.7436
1697
+ 瘤 -10.7436
1698
+ 膜 -10.7436
1699
+ 騎 -10.7436
1700
+ 鼻 -10.7436
1701
+ 割 -10.7578
1702
+ 灰 -10.7578
1703
+ 煩 -10.7578
1704
+ 矚 -10.7578
1705
+ 締 -10.7578
1706
+ 賦 -10.7578
1707
+ 酸 -10.7578
1708
+ 冠 -10.7722
1709
+ 刊 -10.7722
1710
+ 勤 -10.7722
1711
+ 嚇 -10.7722
1712
+ 址 -10.7722
1713
+ 妳 -10.7722
1714
+ 孔 -10.7722
1715
+ 孝 -10.7722
1716
+ 尾 -10.7722
1717
+ 拔 -10.7722
1718
+ 昭 -10.7722
1719
+ 蒐 -10.7722
1720
+ 薦 -10.7722
1721
+ 衰 -10.7722
1722
+ 諮 -10.7722
1723
+ 趁 -10.7722
1724
+ 陶 -10.7722
1725
+ 怨 -10.7868
1726
+ 悠 -10.7868
1727
+ 柯 -10.7868
1728
+ 毫 -10.7868
1729
+ 睡 -10.7868
1730
+ 筋 -10.7868
1731
+ 貢 -10.7868
1732
+ 躍 -10.7868
1733
+ 邁 -10.7868
1734
+ 釐 -10.7868
1735
+ 飽 -10.7868
1736
+ 催 -10.8016
1737
+ 嘗 -10.8016
1738
+ 奧 -10.8016
1739
+ 娃 -10.8016
1740
+ 寮 -10.8016
1741
+ 皇 -10.8016
1742
+ 盧 -10.8016
1743
+ 礦 -10.8016
1744
+ 壁 -10.8166
1745
+ 杜 -10.8166
1746
+ 橫 -10.8166
1747
+ 氧 -10.8166
1748
+ 犧 -10.8166
1749
+ 伍 -10.8319
1750
+ 傍 -10.8319
1751
+ 庸 -10.8319
1752
+ 爺 -10.8319
1753
+ 番 -10.8319
1754
+ 舍 -10.8319
1755
+ 譽 -10.8319
1756
+ 遞 -10.8319
1757
+ 郎 -10.8319
1758
+ 鹽 -10.8319
1759
+ 濤 -10.8474
1760
+ 縱 -10.8474
1761
+ 誓 -10.8474
1762
+ 赴 -10.8474
1763
+ 丸 -10.8632
1764
+ 喘 -10.8632
1765
+ 妥 -10.8632
1766
+ 婕 -10.8632
1767
+ 愈 -10.8632
1768
+ 插 -10.8632
1769
+ 杯 -10.8632
1770
+ 桶 -10.8632
1771
+ 滾 -10.8632
1772
+ 紫 -10.8632
1773
+ 綾 -10.8632
1774
+ 脆 -10.8632
1775
+ 螢 -10.8632
1776
+ 貓 -10.8632
1777
+ 飾 -10.8632
1778
+ 麥 -10.8632
1779
+ 岩 -10.8792
1780
+ 枝 -10.8792
1781
+ 甸 -10.8792
1782
+ 聊 -10.8792
1783
+ 峽 -10.8954
1784
+ 斥 -10.8954
1785
+ 棲 -10.8954
1786
+ 滴 -10.8954
1787
+ 玫 -10.8954
1788
+ 玻 -10.8954
1789
+ 芝 -10.8954
1790
+ 芬 -10.8954
1791
+ 虎 -10.8954
1792
+ 辨 -10.8954
1793
+ 徑 -10.912
1794
+ 敦 -10.912
1795
+ 篩 -10.912
1796
+ 註 -10.912
1797
+ 輟 -10.912
1798
+ 騙 -10.912
1799
+ 埔 -10.9288
1800
+ 彎 -10.9288
1801
+ 慰 -10.9288
1802
+ 拓 -10.9288
1803
+ 焚 -10.9288
1804
+ 罕 -10.9288
1805
+ 耗 -10.9288
1806
+ 蓄 -10.9288
1807
+ 釀 -10.9288
1808
+ 駁 -10.9288
1809
+ 冊 -10.9459
1810
+ 塗 -10.9459
1811
+ 姚 -10.9459
1812
+ 孕 -10.9459
1813
+ 崇 -10.9459
1814
+ 撐 -10.9459
1815
+ 璃 -10.9459
1816
+ 癮 -10.9459
1817
+ 砍 -10.9459
1818
+ 繫 -10.9459
1819
+ 脂 -10.9459
1820
+ 豚 -10.9459
1821
+ 趟 -10.9459
1822
+ 鞋 -10.9459
1823
+ 偉 -10.9632
1824
+ 卑 -10.9632
1825
+ 卷 -10.9632
1826
+ 姝 -10.9632
1827
+ 憾 -10.9632
1828
+ 沉 -10.9632
1829
+ 祕 -10.9632
1830
+ 翡 -10.9632
1831
+ 葛 -10.9632
1832
+ 蕭 -10.9632
1833
+ 酵 -10.9632
1834
+ 丙 -10.9809
1835
+ 傑 -10.9809
1836
+ 姻 -10.9809
1837
+ 孤 -10.9809
1838
+ 擋 -10.9809
1839
+ 碟 -10.9809
1840
+ 緝 -10.9809
1841
+ 緣 -10.9809
1842
+ 緻 -10.9809
1843
+ 茂 -10.9809
1844
+ 僑 -10.999
1845
+ 壯 -10.999
1846
+ 撞 -10.999
1847
+ 旺 -10.999
1848
+ 凱 -11.0173
1849
+ 叛 -11.0173
1850
+ 慘 -11.0173
1851
+ 敲 -11.0173
1852
+ 椰 -11.0173
1853
+ 狂 -11.0173
1854
+ 腔 -11.0173
1855
+ 艱 -11.0173
1856
+ 仲 -11.036
1857
+ 刮 -11.036
1858
+ 剝 -11.036
1859
+ 哼 -11.036
1860
+ 淑 -11.036
1861
+ 潤 -11.036
1862
+ 盃 -11.036
1863
+ 粽 -11.036
1864
+ 逼 -11.036
1865
+ 頸 -11.036
1866
+ 騷 -11.036
1867
+ 丹 -11.0551
1868
+ 偽 -11.0551
1869
+ 吐 -11.0551
1870
+ 恩 -11.0551
1871
+ 涼 -11.0551
1872
+ 煮 -11.0551
1873
+ 熊 -11.0551
1874
+ 盼 -11.0551
1875
+ 眷 -11.0551
1876
+ 秉 -11.0551
1877
+ 綱 -11.0551
1878
+ 罵 -11.0551
1879
+ 趙 -11.0551
1880
+ 迪 -11.0551
1881
+ 遙 -11.0551
1882
+ 骸 -11.0551
1883
+ 黏 -11.0551
1884
+ 吹 -11.0745
1885
+ 呆 -11.0745
1886
+ 撫 -11.0745
1887
+ 攸 -11.0745
1888
+ 獵 -11.0745
1889
+ 盾 -11.0745
1890
+ 繞 -11.0745
1891
+ 芭 -11.0745
1892
+ 菇 -11.0745
1893
+ 萊 -11.0745
1894
+ 藻 -11.0745
1895
+ 誘 -11.0745
1896
+ 醇 -11.0745
1897
+ 勸 -11.0943
1898
+ 唐 -11.0943
1899
+ 喚 -11.0943
1900
+ 奔 -11.0943
1901
+ 奪 -11.0943
1902
+ 姦 -11.0943
1903
+ 擱 -11.0943
1904
+ 甄 -11.0943
1905
+ 痕 -11.0943
1906
+ 砂 -11.0943
1907
+ 脊 -11.0943
1908
+ 勳 -11.1145
1909
+ 匪 -11.1145
1910
+ 妙 -11.1145
1911
+ 尚 -11.1145
1912
+ 廁 -11.1145
1913
+ 慕 -11.1145
1914
+ 抬 -11.1145
1915
+ 棵 -11.1145
1916
+ 牢 -11.1145
1917
+ 盒 -11.1145
1918
+ 稚 -11.1145
1919
+ 腐 -11.1145
1920
+ 腰 -11.1145
1921
+ 踢 -11.1145
1922
+ 麵 -11.1145
1923
+ 傅 -11.1351
1924
+ 凡 -11.1351
1925
+ 凸 -11.1351
1926
+ 圳 -11.1351
1927
+ 坪 -11.1351
1928
+ 墓 -11.1351
1929
+ 壤 -11.1351
1930
+ 崩 -11.1351
1931
+ 攀 -11.1351
1932
+ 旬 -11.1351
1933
+ 甜 -11.1351
1934
+ 稽 -11.1351
1935
+ 肺 -11.1351
1936
+ 謹 -11.1351
1937
+ 逮 -11.1351
1938
+ 魔 -11.1351
1939
+ 冤 -11.1562
1940
+ 堡 -11.1562
1941
+ 巷 -11.1562
1942
+ 廖 -11.1562
1943
+ 挫 -11.1562
1944
+ 祥 -11.1562
1945
+ 籠 -11.1562
1946
+ 纖 -11.1562
1947
+ 翁 -11.1562
1948
+ 葬 -11.1562
1949
+ 貶 -11.1562
1950
+ 迅 -11.1562
1951
+ 鬼 -11.1562
1952
+ 側 -11.1777
1953
+ 宵 -11.1777
1954
+ 帝 -11.1777
1955
+ 幻 -11.1777
1956
+ 彌 -11.1777
1957
+ 椎 -11.1777
1958
+ 疼 -11.1777
1959
+ 碑 -11.1777
1960
+ 艘 -11.1777
1961
+ 蕾 -11.1777
1962
+ 鯊 -11.1777
1963
+ 鴻 -11.1777
1964
+ 削 -11.1996
1965
+ 吊 -11.1996
1966
+ 塵 -11.1996
1967
+ 幽 -11.1996
1968
+ 柴 -11.1996
1969
+ 瘦 -11.1996
1970
+ 腫 -11.1996
1971
+ 蝠 -11.1996
1972
+ 裔 -11.1996
1973
+ 采 -11.1996
1974
+ 銅 -11.1996
1975
+ 馨 -11.1996
1976
+ 剪 -11.2221
1977
+ 唉 -11.2221
1978
+ 埋 -11.2221
1979
+ 妝 -11.2221
1980
+ 姆 -11.2221
1981
+ 掩 -11.2221
1982
+ 揆 -11.2221
1983
+ 曼 -11.2221
1984
+ 梨 -11.2221
1985
+ 湧 -11.2221
1986
+ 灌 -11.2221
1987
+ 爬 -11.2221
1988
+ 磨 -11.2221
1989
+ 籤 -11.2221
1990
+ 蔬 -11.2221
1991
+ 允 -11.2451
1992
+ 凝 -11.2451
1993
+ 厲 -11.2451
1994
+ 呵 -11.2451
1995
+ 挺 -11.2451
1996
+ 檻 -11.2451
1997
+ 殼 -11.2451
1998
+ 璩 -11.2451
1999
+ 羊 -11.2451
2000
+ 鑽 -11.2451
2001
+ 僵 -11.2686
2002
+ 吵 -11.2686
2003
+ 哭 -11.2686
2004
+ 娛 -11.2686
2005
+ 婉 -11.2686
2006
+ 廉 -11.2686
2007
+ 琪 -11.2686
2008
+ 痺 -11.2686
2009
+ 稀 -11.2686
2010
+ 篇 -11.2686
2011
+ 薩 -11.2686
2012
+ 蝴 -11.2686
2013
+ 詮 -11.2686
2014
+ 謠 -11.2686
2015
+ 赫 -11.2686
2016
+ 躲 -11.2686
2017
+ 輯 -11.2686
2018
+ 銜 -11.2686
2019
+ 陌 -11.2686
2020
+ 函 -11.2927
2021
+ 埃 -11.2927
2022
+ 姿 -11.2927
2023
+ 峇 -11.2927
2024
+ 攜 -11.2927
2025
+ 濁 -11.2927
2026
+ 琳 -11.2927
2027
+ 瑯 -11.2927
2028
+ 稿 -11.2927
2029
+ 竊 -11.2927
2030
+ 糕 -11.2927
2031
+ 蔓 -11.2927
2032
+ 薄 -11.2927
2033
+ 謨 -11.2927
2034
+ 霞 -11.2927
2035
+ 飄 -11.2927
2036
+ 騰 -11.2927
2037
+ 卓 -11.3174
2038
+ 抨 -11.3174
2039
+ 旋 -11.3174
2040
+ 盆 -11.3174
2041
+ 秩 -11.3174
2042
+ 糊 -11.3174
2043
+ 腹 -11.3174
2044
+ 莎 -11.3174
2045
+ 萍 -11.3174
2046
+ 蛙 -11.3174
2047
+ 詐 -11.3174
2048
+ 賢 -11.3174
2049
+ 醉 -11.3174
2050
+ 鵝 -11.3174
2051
+ 寵 -11.3427
2052
+ 抹 -11.3427
2053
+ 挪 -11.3427
2054
+ 擦 -11.3427
2055
+ 曬 -11.3427
2056
+ 栽 -11.3427
2057
+ 梯 -11.3427
2058
+ 濱 -11.3427
2059
+ 灑 -11.3427
2060
+ 町 -11.3427
2061
+ 盪 -11.3427
2062
+ 粹 -11.3427
2063
+ 紓 -11.3427
2064
+ 翔 -11.3427
2065
+ 茜 -11.3427
2066
+ 萎 -11.3427
2067
+ 蝙 -11.3427
2068
+ 餵 -11.3427
2069
+ 僚 -11.3687
2070
+ 勘 -11.3687
2071
+ 咬 -11.3687
2072
+ 喀 -11.3687
2073
+ 嘴 -11.3687
2074
+ 夥 -11.3687
2075
+ 恰 -11.3687
2076
+ 懼 -11.3687
2077
+ 拋 -11.3687
2078
+ 挽 -11.3687
2079
+ 枯 -11.3687
2080
+ 汰 -11.3687
2081
+ 滯 -11.3687
2082
+ 爛 -11.3687
2083
+ 猩 -11.3687
2084
+ 琛 -11.3687
2085
+ 禍 -11.3687
2086
+ 秒 -11.3687
2087
+ 芽 -11.3687
2088
+ 赤 -11.3687
2089
+ 辜 -11.3687
2090
+ 鷺 -11.3687
2091
+ 夾 -11.3954
2092
+ 嫁 -11.3954
2093
+ 戀 -11.3954
2094
+ 杉 -11.3954
2095
+ 欺 -11.3954
2096
+ 涯 -11.3954
2097
+ 潑 -11.3954
2098
+ 碧 -11.3954
2099
+ 祈 -11.3954
2100
+ 翰 -11.3954
2101
+ 肚 -11.3954
2102
+ 胃 -11.3954
2103
+ 臍 -11.3954
2104
+ 蒂 -11.3954
2105
+ 蘋 -11.3954
2106
+ 雁 -11.3954
2107
+ 乙 -11.4228
2108
+ 井 -11.4228
2109
+ 勾 -11.4228
2110
+ 姑 -11.4228
2111
+ 宴 -11.4228
2112
+ 屍 -11.4228
2113
+ 憤 -11.4228
2114
+ 撒 -11.4228
2115
+ 浩 -11.4228
2116
+ 淚 -11.4228
2117
+ 疲 -11.4228
2118
+ 緬 -11.4228
2119
+ 萄 -11.4228
2120
+ 葡 -11.4228
2121
+ 譜 -11.4228
2122
+ 郁 -11.4228
2123
+ 饋 -11.4228
2124
+ 魂 -11.4228
2125
+ 堵 -11.451
2126
+ 怒 -11.451
2127
+ 挾 -11.451
2128
+ 曜 -11.451
2129
+ 朵 -11.451
2130
+ 檜 -11.451
2131
+ 浴 -11.451
2132
+ 眉 -11.451
2133
+ 禪 -11.451
2134
+ 粒 -11.451
2135
+ 粗 -11.451
2136
+ 翼 -11.451
2137
+ 肖 -11.451
2138
+ 蘊 -11.451
2139
+ 轄 -11.451
2140
+ 逝 -11.451
2141
+ 酷 -11.451
2142
+ 霸 -11.451
2143
+ 鞭 -11.451
2144
+ 驅 -11.451
2145
+ 倉 -11.4799
2146
+ 兄 -11.4799
2147
+ 咦 -11.4799
2148
+ 坵 -11.4799
2149
+ 塌 -11.4799
2150
+ 拚 -11.4799
2151
+ 橘 -11.4799
2152
+ 灘 -11.4799
2153
+ 炭 -11.4799
2154
+ 砲 -11.4799
2155
+ 笛 -11.4799
2156
+ 羈 -11.4799
2157
+ 胚 -11.4799
2158
+ 艾 -11.4799
2159
+ 蜜 -11.4799
2160
+ 裕 -11.4799
2161
+ 銳 -11.4799
2162
+ 闖 -11.4799
2163
+ 頗 -11.4799
2164
+ 囚 -11.5098
2165
+ 孳 -11.5098
2166
+ 慾 -11.5098
2167
+ 捉 -11.5098
2168
+ 捨 -11.5098
2169
+ 撿 -11.5098
2170
+ 昂 -11.5098
2171
+ 汁 -11.5098
2172
+ 矛 -11.5098
2173
+ 碎 -11.5098
2174
+ 緹 -11.5098
2175
+ 胸 -11.5098
2176
+ 腎 -11.5098
2177
+ 衍 -11.5098
2178
+ 辰 -11.5098
2179
+ 閩 -11.5098
2180
+ 飆 -11.5098
2181
+ 鬱 -11.5098
2182
+ 魅 -11.5098
2183
+ 俠 -11.5406
2184
+ 勉 -11.5406
2185
+ 坊 -11.5406
2186
+ 墮 -11.5406
2187
+ 岳 -11.5406
2188
+ 曝 -11.5406
2189
+ 槽 -11.5406
2190
+ 欲 -11.5406
2191
+ 溯 -11.5406
2192
+ 煉 -11.5406
2193
+ 疊 -11.5406
2194
+ 穎 -11.5406
2195
+ 肩 -11.5406
2196
+ 蟹 -11.5406
2197
+ 踩 -11.5406
2198
+ 轟 -11.5406
2199
+ 醜 -11.5406
2200
+ 闊 -11.5406
2201
+ 鷹 -11.5406
2202
+ 嬌 -11.5723
2203
+ 崗 -11.5723
2204
+ 廈 -11.5723
2205
+ 彥 -11.5723
2206
+ 愉 -11.5723
2207
+ 慌 -11.5723
2208
+ 框 -11.5723
2209
+ 棚 -11.5723
2210
+ 漿 -11.5723
2211
+ 炳 -11.5723
2212
+ 爵 -11.5723
2213
+ 瑰 -11.5723
2214
+ 肅 -11.5723
2215
+ 脹 -11.5723
2216
+ 膩 -11.5723
2217
+ 艙 -11.5723
2218
+ 茲 -11.5723
2219
+ 虐 -11.5723
2220
+ 虹 -11.5723
2221
+ 蚵 -11.5723
2222
+ 誼 -11.5723
2223
+ 遜 -11.5723
2224
+ 鄧 -11.5723
2225
+ 鍰 -11.5723
2226
+ 闢 -11.5723
2227
+ 陵 -11.5723
2228
+ 兔 -11.6051
2229
+ 劫 -11.6051
2230
+ 卵 -11.6051
2231
+ 嘆 -11.6051
2232
+ 囊 -11.6051
2233
+ 堪 -11.6051
2234
+ 奕 -11.6051
2235
+ 屈 -11.6051
2236
+ 撲 -11.6051
2237
+ 柔 -11.6051
2238
+ 樸 -11.6051
2239
+ 檯 -11.6051
2240
+ 氯 -11.6051
2241
+ 汐 -11.6051
2242
+ 淘 -11.6051
2243
+ 淤 -11.6051
2244
+ 猜 -11.6051
2245
+ 矮 -11.6051
2246
+ 碗 -11.6051
2247
+ 碳 -11.6051
2248
+ 腿 -11.6051
2249
+ 蒜 -11.6051
2250
+ 蜂 -11.6051
2251
+ 蝦 -11.6051
2252
+ 諒 -11.6051
2253
+ 貪 -11.6051
2254
+ 逕 -11.6051
2255
+ 酬 -11.6051
2256
+ 鈴 -11.6051
2257
+ 靳 -11.6051
2258
+ 乃 -11.639
2259
+ 划 -11.639
2260
+ 宇 -11.639
2261
+ 寸 -11.639
2262
+ 巒 -11.639
2263
+ 廊 -11.639
2264
+ 悅 -11.639
2265
+ 敘 -11.639
2266
+ 斗 -11.639
2267
+ 梭 -11.639
2268
+ 械 -11.639
2269
+ 歇 -11.639
2270
+ 猛 -11.639
2271
+ 睹 -11.639
2272
+ 穴 -11.639
2273
+ 讚 -11.639
2274
+ 邏 -11.639
2275
+ 鉉 -11.639
2276
+ 頁 -11.639
2277
+ 鹿 -11.639
2278
+ 齒 -11.639
2279
+ 仙 -11.6741
2280
+ 俱 -11.6741
2281
+ 傘 -11.6741
2282
+ 勃 -11.6741
2283
+ 匆 -11.6741
2284
+ 垮 -11.6741
2285
+ 姊 -11.6741
2286
+ 宕 -11.6741
2287
+ 巫 -11.6741
2288
+ 懸 -11.6741
2289
+ 暨 -11.6741
2290
+ 泊 -11.6741
2291
+ 涂 -11.6741
2292
+ 濾 -11.6741
2293
+ 瑕 -11.6741
2294
+ 縫 -11.6741
2295
+ 胰 -11.6741
2296
+ 蛛 -11.6741
2297
+ 軸 -11.6741
2298
+ 逢 -11.6741
2299
+ 鍊 -11.6741
2300
+ 隧 -11.6741
2301
+ 丈 -11.7105
2302
+ 抑 -11.7105
2303
+ 捲 -11.7105
2304
+ 掀 -11.7105
2305
+ 擅 -11.7105
2306
+ 攔 -11.7105
2307
+ 攬 -11.7105
2308
+ 潭 -11.7105
2309
+ 澱 -11.7105
2310
+ 猶 -11.7105
2311
+ 碩 -11.7105
2312
+ 磁 -11.7105
2313
+ 糞 -11.7105
2314
+ 蜘 -11.7105
2315
+ 蝕 -11.7105
2316
+ 諸 -11.7105
2317
+ 豆 -11.7105
2318
+ 鴉 -11.7105
2319
+ 仿 -11.7482
2320
+ 偕 -11.7482
2321
+ 后 -11.7482
2322
+ 哀 -11.7482
2323
+ 哇 -11.7482
2324
+ 娜 -11.7482
2325
+ 娼 -11.7482
2326
+ 崎 -11.7482
2327
+ 扇 -11.7482
2328
+ 扯 -11.7482
2329
+ 拾 -11.7482
2330
+ 棗 -11.7482
2331
+ 氾 -11.7482
2332
+ 洽 -11.7482
2333
+ 琵 -11.7482
2334
+ 瑄 -11.7482
2335
+ 疵 -11.7482
2336
+ 睜 -11.7482
2337
+ 窘 -11.7482
2338
+ 籃 -11.7482
2339
+ 纏 -11.7482
2340
+ 罩 -11.7482
2341
+ 范 -11.7482
2342
+ 贖 -11.7482
2343
+ 閃 -11.7482
2344
+ 髒 -11.7482
2345
+ 乖 -11.7874
2346
+ 埤 -11.7874
2347
+ 壘 -11.7874
2348
+ 夷 -11.7874
2349
+ 寓 -11.7874
2350
+ 帽 -11.7874
2351
+ 悄 -11.7874
2352
+ 掘 -11.7874
2353
+ 斌 -11.7874
2354
+ 桿 -11.7874
2355
+ 歹 -11.7874
2356
+ 焊 -11.7874
2357
+ 綿 -11.7874
2358
+ 艷 -11.7874
2359
+ 逾 -11.7874
2360
+ 佑 -11.8282
2361
+ 佩 -11.8282
2362
+ 兇 -11.8282
2363
+ 卹 -11.8282
2364
+ 噪 -11.8282
2365
+ 囤 -11.8282
2366
+ 寺 -11.8282
2367
+ 嶺 -11.8282
2368
+ 悟 -11.8282
2369
+ 旭 -11.8282
2370
+ 曹 -11.8282
2371
+ 淵 -11.8282
2372
+ 穆 -11.8282
2373
+ 肪 -11.8282
2374
+ 臭 -11.8282
2375
+ 茫 -11.8282
2376
+ 蒸 -11.8282
2377
+ 詹 -11.8282
2378
+ 諧 -11.8282
2379
+ 譴 -11.8282
2380
+ 郝 -11.8282
2381
+ 佰 -11.8708
2382
+ 傲 -11.8708
2383
+ 噶 -11.8708
2384
+ 尬 -11.8708
2385
+ 尷 -11.8708
2386
+ 巢 -11.8708
2387
+ 弘 -11.8708
2388
+ 忌 -11.8708
2389
+ 惱 -11.8708
2390
+ 撼 -11.8708
2391
+ 擄 -11.8708
2392
+ 攏 -11.8708
2393
+ 昏 -11.8708
2394
+ 沛 -11.8708
2395
+ 漆 -11.8708
2396
+ 狹 -11.8708
2397
+ 磯 -11.8708
2398
+ 穫 -11.8708
2399
+ 糟 -11.8708
2400
+ 紮 -11.8708
2401
+ 臣 -11.8708
2402
+ 裸 -11.8708
2403
+ 覓 -11.8708
2404
+ 赦 -11.8708
2405
+ 軒 -11.8708
2406
+ 鼎 -11.8708
2407
+ 于 -11.9153
2408
+ 喬 -11.9153
2409
+ 屢 -11.9153
2410
+ 庚 -11.9153
2411
+ 恭 -11.9153
2412
+ 惑 -11.9153
2413
+ 摘 -11.9153
2414
+ 樺 -11.9153
2415
+ 汞 -11.9153
2416
+ 淪 -11.9153
2417
+ 窄 -11.9153
2418
+ 紗 -11.9153
2419
+ 艇 -11.9153
2420
+ 蒲 -11.9153
2421
+ 蓬 -11.9153
2422
+ 衷 -11.9153
2423
+ 貯 -11.9153
2424
+ 遮 -11.9153
2425
+ 鉛 -11.9153
2426
+ 靡 -11.9153
2427
+ 駿 -11.9153
2428
+ 仗 -11.9618
2429
+ 卿 -11.9618
2430
+ 厘 -11.9618
2431
+ 塢 -11.9618
2432
+ 扭 -11.9618
2433
+ 斐 -11.9618
2434
+ 晃 -11.9618
2435
+ 枉 -11.9618
2436
+ 浸 -11.9618
2437
+ 渴 -11.9618
2438
+ 澡 -11.9618
2439
+ 磚 -11.9618
2440
+ 糧 -11.9618
2441
+ 臥 -11.9618
2442
+ 舖 -11.9618
2443
+ 萃 -11.9618
2444
+ 諷 -11.9618
2445
+ 謎 -11.9618
2446
+ 辱 -11.9618
2447
+ 逛 -11.9618
2448
+ 鳴 -11.9618
2449
+ 亭 -12.0106
2450
+ 侯 -12.0106
2451
+ 劣 -12.0106
2452
+ 卜 -12.0106
2453
+ 喇 -12.0106
2454
+ 喻 -12.0106
2455
+ 垂 -12.0106
2456
+ 塘 -12.0106
2457
+ 墊 -12.0106
2458
+ 墜 -12.0106
2459
+ 壟 -12.0106
2460
+ 廂 -12.0106
2461
+ 慨 -12.0106
2462
+ 扎 -12.0106
2463
+ 抒 -12.0106
2464
+ 掏 -12.0106
2465
+ 柱 -12.0106
2466
+ 榕 -12.0106
2467
+ 樁 -12.0106
2468
+ 泌 -12.0106
2469
+ 湊 -12.0106
2470
+ 溉 -12.0106
2471
+ 潘 -12.0106
2472
+ 燙 -12.0106
2473
+ 燥 -12.0106
2474
+ 獸 -12.0106
2475
+ 瘋 -12.0106
2476
+ 碌 -12.0106
2477
+ 禦 -12.0106
2478
+ 聾 -12.0106
2479
+ 蔚 -12.0106
2480
+ 証 -12.0106
2481
+ 豹 -12.0106
2482
+ 貞 -12.0106
2483
+ 鉅 -12.0106
2484
+ 鑼 -12.0106
2485
+ 鑿 -12.0106
2486
+ 魄 -12.0106
2487
+ 侶 -12.0619
2488
+ 倡 -12.0619
2489
+ 坍 -12.0619
2490
+ 屯 -12.0619
2491
+ 幟 -12.0619
2492
+ 弦 -12.0619
2493
+ 汪 -12.0619
2494
+ 淋 -12.0619
2495
+ 瀉 -12.0619
2496
+ 煞 -12.0619
2497
+ 獅 -12.0619
2498
+ 癒 -12.0619
2499
+ 眠 -12.0619
2500
+ 箭 -12.0619
2501
+ 臂 -12.0619
2502
+ 虞 -12.0619
2503
+ 蠟 -12.0619
2504
+ 醞 -12.0619
2505
+ 髓 -12.0619
2506
+ 鮪 -12.0619
2507
+ 鷲 -12.0619
2508
+ 侷 -12.1159
2509
+ 俘 -12.1159
2510
+ 倪 -12.1159
2511
+ 卉 -12.1159
2512
+ 卯 -12.1159
2513
+ 嚐 -12.1159
2514
+ 墅 -12.1159
2515
+ 壩 -12.1159
2516
+ 姨 -12.1159
2517
+ 姬 -12.1159
2518
+ 婷 -12.1159
2519
+ 寂 -12.1159
2520
+ 履 -12.1159
2521
+ 嶄 -12.1159
2522
+ 帕 -12.1159
2523
+ 庇 -12.1159
2524
+ 拘 -12.1159
2525
+ 搓 -12.1159
2526
+ 晉 -12.1159
2527
+ 柑 -12.1159
2528
+ 殷 -12.1159
2529
+ 淇 -12.1159
2530
+ 煤 -12.1159
2531
+ 燦 -12.1159
2532
+ 爽 -12.1159
2533
+ 玄 -12.1159
2534
+ 瑋 -12.1159
2535
+ 瓊 -12.1159
2536
+ 疆 -12.1159
2537
+ 睞 -12.1159
2538
+ 竭 -12.1159
2539
+ 筒 -12.1159
2540
+ 紡 -12.1159
2541
+ 耘 -12.1159
2542
+ 蕩 -12.1159
2543
+ 蛾 -12.1159
2544
+ 袱 -12.1159
2545
+ 諱 -12.1159
2546
+ 逆 -12.1159
2547
+ 遏 -12.1159
2548
+ 醬 -12.1159
2549
+ 驟 -12.1159
2550
+ 魏 -12.1159
2551
+ 劾 -12.1731
2552
+ 吞 -12.1731
2553
+ 墩 -12.1731
2554
+ 妓 -12.1731
2555
+ 妮 -12.1731
2556
+ 寞 -12.1731
2557
+ 寡 -12.1731
2558
+ 屁 -12.1731
2559
+ 彗 -12.1731
2560
+ 抄 -12.1731
2561
+ 拳 -12.1731
2562
+ 斜 -12.1731
2563
+ 枚 -12.1731
2564
+ 欽 -12.1731
2565
+ 殿 -12.1731
2566
+ 炒 -12.1731
2567
+ 烹 -12.1731
2568
+ 畜 -12.1731
2569
+ 皂 -12.1731
2570
+ 禽 -12.1731
2571
+ 竄 -12.1731
2572
+ 竿 -12.1731
2573
+ 糙 -12.1731
2574
+ 罐 -12.1731
2575
+ 臘 -12.1731
2576
+ 舟 -12.1731
2577
+ 蔗 -12.1731
2578
+ 訝 -12.1731
2579
+ 賜 -12.1731
2580
+ 踐 -12.1731
2581
+ 踴 -12.1731
2582
+ 躬 -12.1731
2583
+ 輒 -12.1731
2584
+ 邪 -12.1731
2585
+ 銓 -12.1731
2586
+ 鑰 -12.1731
2587
+ 隸 -12.1731
2588
+ 靖 -12.1731
2589
+ 鞠 -12.1731
2590
+ 飼 -12.1731
2591
+ 鹼 -12.1731
2592
+ 叢 -12.2337
2593
+ 哨 -12.2337
2594
+ 帥 -12.2337
2595
+ 庄 -12.2337
2596
+ 彙 -12.2337
2597
+ 忱 -12.2337
2598
+ 愁 -12.2337
2599
+ 桂 -12.2337
2600
+ 桐 -12.2337
2601
+ 桑 -12.2337
2602
+ 棘 -12.2337
2603
+ 沾 -12.2337
2604
+ 浦 -12.2337
2605
+ 潰 -12.2337
2606
+ 瀕 -12.2337
2607
+ 烘 -12.2337
2608
+ 畏 -12.2337
2609
+ 瞻 -12.2337
2610
+ 砸 -12.2337
2611
+ 穀 -12.2337
2612
+ 窒 -12.2337
2613
+ 窩 -12.2337
2614
+ 簿 -12.2337
2615
+ 耨 -12.2337
2616
+ 芒 -12.2337
2617
+ 蘆 -12.2337
2618
+ 蛇 -12.2337
2619
+ 蠹 -12.2337
2620
+ 褲 -12.2337
2621
+ 襄 -12.2337
2622
+ 訣 -12.2337
2623
+ 謊 -12.2337
2624
+ 躺 -12.2337
2625
+ 酗 -12.2337
2626
+ 酪 -12.2337
2627
+ 霍 -12.2337
2628
+ 頑 -12.2337
2629
+ 顱 -12.2337
2630
+ 颼 -12.2337
2631
+ 飢 -12.2337
2632
+ 駭 -12.2337
2633
+ 丘 -12.2983
2634
+ 咯 -12.2983
2635
+ 哺 -12.2983
2636
+ 嚼 -12.2983
2637
+ 墟 -12.2983
2638
+ 壺 -12.2983
2639
+ 孑 -12.2983
2640
+ 徙 -12.2983
2641
+ 悶 -12.2983
2642
+ 捍 -12.2983
2643
+ 朽 -12.2983
2644
+ 毯 -12.2983
2645
+ 漠 -12.2983
2646
+ 甫 -12.2983
2647
+ 瘡 -12.2983
2648
+ 睿 -12.2983
2649
+ 繃 -12.2983
2650
+ 蚓 -12.2983
2651
+ 蚯 -12.2983
2652
+ 謙 -12.2983
2653
+ 豔 -12.2983
2654
+ 輿 -12.2983
2655
+ 鄒 -12.2983
2656
+ 釣 -12.2983
2657
+ 闡 -12.2983
2658
+ 仇 -12.3672
2659
+ 俸 -12.3672
2660
+ 啤 -12.3672
2661
+ 壢 -12.3672
2662
+ 孓 -12.3672
2663
+ 巖 -12.3672
2664
+ 征 -12.3672
2665
+ 怠 -12.3672
2666
+ 恨 -12.3672
2667
+ 憐 -12.3672
2668
+ 捏 -12.3672
2669
+ 斃 -12.3672
2670
+ 斬 -12.3672
2671
+ 曙 -12.3672
2672
+ 欒 -12.3672
2673
+ 氓 -12.3672
2674
+ 沃 -12.3672
2675
+ 洶 -12.3672
2676
+ 溜 -12.3672
2677
+ 澈 -12.3672
2678
+ 熬 -12.3672
2679
+ 燭 -12.3672
2680
+ 琦 -12.3672
2681
+ 瑛 -12.3672
2682
+ 繽 -12.3672
2683
+ 脾 -12.3672
2684
+ 腺 -12.3672
2685
+ 葆 -12.3672
2686
+ 螃 -12.3672
2687
+ 螺 -12.3672
2688
+ 辣 -12.3672
2689
+ 鋪 -12.3672
2690
+ 閘 -12.3672
2691
+ 陀 -12.3672
2692
+ 霖 -12.3672
2693
+ 霜 -12.3672
2694
+ 靚 -12.3672
2695
+ 餓 -12.3672
2696
+ 伏 -12.4414
2697
+ 伙 -12.4414
2698
+ 卦 -12.4414
2699
+ 叮 -12.4414
2700
+ 咎 -12.4414
2701
+ 咳 -12.4414
2702
+ 圭 -12.4414
2703
+ 姜 -12.4414
2704
+ 崙 -12.4414
2705
+ 弓 -12.4414
2706
+ 悼 -12.4414
2707
+ 憨 -12.4414
2708
+ 拯 -12.4414
2709
+ 挨 -12.4414
2710
+ 摔 -12.4414
2711
+ 撓 -12.4414
2712
+ 擎 -12.4414
2713
+ 敞 -12.4414
2714
+ 旨 -12.4414
2715
+ 晤 -12.4414
2716
+ 曠 -12.4414
2717
+ 朴 -12.4414
2718
+ 柳 -12.4414
2719
+ 歪 -12.4414
2720
+ 沫 -12.4414
2721
+ 滄 -12.4414
2722
+ 滲 -12.4414
2723
+ 澆 -12.4414
2724
+ 炫 -12.4414
2725
+ 烯 -12.4414
2726
+ 璋 -12.4414
2727
+ 癬 -12.4414
2728
+ 癱 -12.4414
2729
+ 盯 -12.4414
2730
+ 瞧 -12.4414
2731
+ 禱 -12.4414
2732
+ 窯 -12.4414
2733
+ 肇 -12.4414
2734
+ 膀 -12.4414
2735
+ 蟻 -12.4414
2736
+ 詠 -12.4414
2737
+ 誇 -12.4414
2738
+ 賈 -12.4414
2739
+ 邸 -12.4414
2740
+ 釘 -12.4414
2741
+ 鈞 -12.4414
2742
+ 鉤 -12.4414
2743
+ 鋸 -12.4414
2744
+ 鍋 -12.4414
2745
+ 顛 -12.4414
2746
+ 饒 -12.4414
2747
+ 馳 -12.4414
2748
+ 鱗 -12.4414
2749
+ 黛 -12.4414
2750
+ 亨 -12.5214
2751
+ 倖 -12.5214
2752
+ 儼 -12.5214
2753
+ 匿 -12.5214
2754
+ 孽 -12.5214
2755
+ 弗 -12.5214
2756
+ 悔 -12.5214
2757
+ 惶 -12.5214
2758
+ 惹 -12.5214
2759
+ 憩 -12.5214
2760
+ 披 -12.5214
2761
+ 昔 -12.5214
2762
+ 昧 -12.5214
2763
+ 昱 -12.5214
2764
+ 柵 -12.5214
2765
+ 栩 -12.5214
2766
+ 棣 -12.5214
2767
+ 棺 -12.5214
2768
+ 橡 -12.5214
2769
+ 沐 -12.5214
2770
+ 淆 -12.5214
2771
+ 渺 -12.5214
2772
+ 澀 -12.5214
2773
+ 牡 -12.5214
2774
+ 狼 -12.5214
2775
+ 瑣 -12.5214
2776
+ 皆 -12.5214
2777
+ 瞄 -12.5214
2778
+ 硫 -12.5214
2779
+ 礫 -12.5214
2780
+ 籬 -12.5214
2781
+ 耍 -12.5214
2782
+ 腕 -12.5214
2783
+ 臻 -12.5214
2784
+ 舌 -12.5214
2785
+ 苛 -12.5214
2786
+ 荼 -12.5214
2787
+ 蒼 -12.5214
2788
+ 薑 -12.5214
2789
+ 袍 -12.5214
2790
+ 裹 -12.5214
2791
+ 迢 -12.5214
2792
+ 鈷 -12.5214
2793
+ 闈 -12.5214
2794
+ 陋 -12.5214
2795
+ 雀 -12.5214
2796
+ 驕 -12.5214
2797
+ 亦 -12.6084
2798
+ 佣 -12.6084
2799
+ 厭 -12.6084
2800
+ 咽 -12.6084
2801
+ 喧 -12.6084
2802
+ 嗓 -12.6084
2803
+ 嗽 -12.6084
2804
+ 奠 -12.6084
2805
+ 宰 -12.6084
2806
+ 峻 -12.6084
2807
+ 懶 -12.6084
2808
+ 抖 -12.6084
2809
+ 拭 -12.6084
2810
+ 搏 -12.6084
2811
+ 暈 -12.6084
2812
+ 札 -12.6084
2813
+ 株 -12.6084
2814
+ 椿 -12.6084
2815
+ 濬 -12.6084
2816
+ 瀰 -12.6084
2817
+ 猴 -12.6084
2818
+ 瓷 -12.6084
2819
+ 疽 -12.6084
2820
+ 瘓 -12.6084
2821
+ 瞬 -12.6084
2822
+ 砌 -12.6084
2823
+ 禿 -12.6084
2824
+ 篤 -12.6084
2825
+ 粟 -12.6084
2826
+ 耽 -12.6084
2827
+ 胺 -12.6084
2828
+ 脖 -12.6084
2829
+ 膨 -12.6084
2830
+ 茹 -12.6084
2831
+ 藤 -12.6084
2832
+ 蝗 -12.6084
2833
+ 螞 -12.6084
2834
+ 蠵 -12.6084
2835
+ 謬 -12.6084
2836
+ 遴 -12.6084
2837
+ 醣 -12.6084
2838
+ 銲 -12.6084
2839
+ 鎔 -12.6084
2840
+ 阱 -12.6084
2841
+ 霾 -12.6084
2842
+ 伶 -12.7037
2843
+ 俐 -12.7037
2844
+ 傻 -12.7037
2845
+ 儉 -12.7037
2846
+ 凹 -12.7037
2847
+ 嚮 -12.7037
2848
+ 埕 -12.7037
2849
+ 宙 -12.7037
2850
+ 屠 -12.7037
2851
+ 岡 -12.7037
2852
+ 忡 -12.7037
2853
+ 恤 -12.7037
2854
+ 惕 -12.7037
2855
+ 戈 -12.7037
2856
+ 扛 -12.7037
2857
+ 扼 -12.7037
2858
+ 揉 -12.7037
2859
+ 摧 -12.7037
2860
+ 撕 -12.7037
2861
+ 晒 -12.7037
2862
+ 沸 -12.7037
2863
+ 渾 -12.7037
2864
+ 溶 -12.7037
2865
+ 熄 -12.7037
2866
+ 爪 -12.7037
2867
+ 痢 -12.7037
2868
+ 眨 -12.7037
2869
+ 矽 -12.7037
2870
+ 磋 -12.7037
2871
+ 祐 -12.7037
2872
+ 禧 -12.7037
2873
+ 穗 -12.7037
2874
+ 窟 -12.7037
2875
+ 箏 -12.7037
2876
+ 綢 -12.7037
2877
+ 繩 -12.7037
2878
+ 缸 -12.7037
2879
+ 罌 -12.7037
2880
+ 羞 -12.7037
2881
+ 翅 -12.7037
2882
+ 膳 -12.7037
2883
+ 薇 -12.7037
2884
+ 誹 -12.7037
2885
+ 諦 -12.7037
2886
+ 謗 -12.7037
2887
+ 趾 -12.7037
2888
+ 逗 -12.7037
2889
+ 郊 -12.7037
2890
+ 酌 -12.7037
2891
+ 酮 -12.7037
2892
+ 釁 -12.7037
2893
+ 韌 -12.7037
2894
+ 鮑 -12.7037
2895
+ 鷗 -12.7037
2896
+ 倆 -12.8091
2897
+ 匹 -12.8091
2898
+ 唇 -12.8091
2899
+ 唷 -12.8091
2900
+ 喉 -12.8091
2901
+ 嘔 -12.8091
2902
+ 垢 -12.8091
2903
+ 墳 -12.8091
2904
+ 峙 -12.8091
2905
+ 巾 -12.8091
2906
+ 帆 -12.8091
2907
+ 弭 -12.8091
2908
+ 彷 -12.8091
2909
+ 愕 -12.8091
2910
+ 懈 -12.8091
2911
+ 捧 -12.8091
2912
+ 撇 -12.8091
2913
+ 撰 -12.8091
2914
+ 斧 -12.8091
2915
+ 晰 -12.8091
2916
+ 梵 -12.8091
2917
+ 棋 -12.8091
2918
+ 樞 -12.8091
2919
+ 殃 -12.8091
2920
+ 汛 -12.8091
2921
+ 渠 -12.8091
2922
+ 瀾 -12.8091
2923
+ 烤 -12.8091
2924
+ 璿 -12.8091
2925
+ 癥 -12.8091
2926
+ 秦 -12.8091
2927
+ 稜 -12.8091
2928
+ 竅 -12.8091
2929
+ 綻 -12.8091
2930
+ 羨 -12.8091
2931
+ 翊 -12.8091
2932
+ 聆 -12.8091
2933
+ 膏 -12.8091
2934
+ 舵 -12.8091
2935
+ 芮 -12.8091
2936
+ 芹 -12.8091
2937
+ 茵 -12.8091
2938
+ 葵 -12.8091
2939
+ 蔘 -12.8091
2940
+ 薯 -12.8091
2941
+ 藩 -12.8091
2942
+ 襟 -12.8091
2943
+ 詭 -12.8091
2944
+ 誦 -12.8091
2945
+ 蹲 -12.8091
2946
+ 鈉 -12.8091
2947
+ 鈕 -12.8091
2948
+ 鈣 -12.8091
2949
+ 鑒 -12.8091
2950
+ 霉 -12.8091
2951
+ 鶯 -12.8091
2952
+ 侍 -12.9269
2953
+ 兢 -12.9269
2954
+ 凋 -12.9269
2955
+ 凶 -12.9269
2956
+ 剷 -12.9269
2957
+ 匙 -12.9269
2958
+ 厝 -12.9269
2959
+ 吻 -12.9269
2960
+ 咕 -12.9269
2961
+ 嚥 -12.9269
2962
+ 囂 -12.9269
2963
+ 堰 -12.9269
2964
+ 嫖 -12.9269
2965
+ 帖 -12.9269
2966
+ 廝 -12.9269
2967
+ 彿 -12.9269
2968
+ 戚 -12.9269
2969
+ 拮 -12.9269
2970
+ 摹 -12.9269
2971
+ 敷 -12.9269
2972
+ 殯 -12.9269
2973
+ 氫 -12.9269
2974
+ 汶 -12.9269
2975
+ 沮 -12.9269
2976
+ 湛 -12.9269
2977
+ 溢 -12.9269
2978
+ 瀆 -12.9269
2979
+ 灼 -12.9269
2980
+ 焰 -12.9269
2981
+ 熔 -12.9269
2982
+ 牯 -12.9269
2983
+ 畔 -12.9269
2984
+ 癢 -12.9269
2985
+ 瞞 -12.9269
2986
+ 矯 -12.9269
2987
+ 笨 -12.9269
2988
+ 簾 -12.9269
2989
+ 綵 -12.9269
2990
+ 繹 -12.9269
2991
+ 羽 -12.9269
2992
+ 翩 -12.9269
2993
+ 聳 -12.9269
2994
+ 肆 -12.9269
2995
+ 肛 -12.9269
2996
+ 茅 -12.9269
2997
+ 萱 -12.9269
2998
+ 薰 -12.9269
2999
+ 褪 -12.9269
3000
+ 襪 -12.9269
3001
+ 襯 -12.9269
3002
+ 詬 -12.9269
3003
+ 躁 -12.9269
3004
+ 輾 -12.9269
3005
+ 轎 -12.9269
3006
+ 逸 -12.9269
3007
+ 銫 -12.9269
3008
+ 錶 -12.9269
3009
+ 鍛 -12.9269
3010
+ 鏽 -12.9269
3011
+ 鑫 -12.9269
3012
+ 阮 -12.9269
3013
+ 雋 -12.9269
3014
+ 雯 -12.9269
3015
+ 鞏 -12.9269
3016
+ 頌 -12.9269
3017
+ 餾 -12.9269
3018
+ 骼 -12.9269
3019
+ 齋 -12.9269
3020
+ 侃 -13.0604
3021
+ 倚 -13.0604
3022
+ 傢 -13.0604
3023
+ 僧 -13.0604
3024
+ 儕 -13.0604
3025
+ 冥 -13.0604
3026
+ 剎 -13.0604
3027
+ 叉 -13.0604
3028
+ 哎 -13.0604
3029
+ 噓 -13.0604
3030
+ 圃 -13.0604
3031
+ 坻 -13.0604
3032
+ 嫻 -13.0604
3033
+ 寨 -13.0604
3034
+ 岱 -13.0604
3035
+ 崖 -13.0604
3036
+ 徊 -13.0604
3037
+ 徘 -13.0604
3038
+ 慷 -13.0604
3039
+ 憫 -13.0604
3040
+ 戎 -13.0604
3041
+ 戳 -13.0604
3042
+ 扒 -13.0604
3043
+ 扳 -13.0604
3044
+ 挹 -13.0604
3045
+ 摻 -13.0604
3046
+ 斂 -13.0604
3047
+ 杰 -13.0604
3048
+ 椒 -13.0604
3049
+ 楣 -13.0604
3050
+ 櫥 -13.0604
3051
+ 淞 -13.0604
3052
+ 渣 -13.0604
3053
+ 漓 -13.0604
3054
+ 狄 -13.0604
3055
+ 猖 -13.0604
3056
+ 瑟 -13.0604
3057
+ 疝 -13.0604
3058
+ 癡 -13.0604
3059
+ 眺 -13.0604
3060
+ 瞎 -13.0604
3061
+ 矩 -13.0604
3062
+ 祀 -13.0604
3063
+ 祉 -13.0604
3064
+ 祿 -13.0604
3065
+ 紘 -13.0604
3066
+ 絮 -13.0604
3067
+ 綴 -13.0604
3068
+ 繆 -13.0604
3069
+ 耆 -13.0604
3070
+ 聶 -13.0604
3071
+ 舜 -13.0604
3072
+ 芋 -13.0604
3073
+ 茄 -13.0604
3074
+ 茱 -13.0604
3075
+ 莖 -13.0604
3076
+ 菱 -13.0604
3077
+ 藐 -13.0604
3078
+ 蘿 -13.0604
3079
+ 虔 -13.0604
3080
+ 蛀 -13.0604
3081
+ 裙 -13.0604
3082
+ 裳 -13.0604
3083
+ 詰 -13.0604
3084
+ 贓 -13.0604
3085
+ 趴 -13.0604
3086
+ 遐 -13.0604
3087
+ 邵 -13.0604
3088
+ 郡 -13.0604
3089
+ 鏈 -13.0604
3090
+ 隙 -13.0604
3091
+ 雛 -13.0604
3092
+ 顫 -13.0604
3093
+ 餚 -13.0604
3094
+ 馮 -13.0604
3095
+ 魟 -13.0604
3096
+ 鵬 -13.0604
3097
+ 鶴 -13.0604
3098
+ 鹹 -13.0604
3099
+ 黯 -13.0604
3100
+ 么 -13.2145
3101
+ 伺 -13.2145
3102
+ 侮 -13.2145
3103
+ 傭 -13.2145
3104
+ 僻 -13.2145
3105
+ 冗 -13.2145
3106
+ 剔 -13.2145
3107
+ 劍 -13.2145
3108
+ 勿 -13.2145
3109
+ 叔 -13.2145
3110
+ 叩 -13.2145
3111
+ 吼 -13.2145
3112
+ 啞 -13.2145
3113
+ 喂 -13.2145
3114
+ 嘟 -13.2145
3115
+ 嘲 -13.2145
3116
+ 嚕 -13.2145
3117
+ 奢 -13.2145
3118
+ 妃 -13.2145
3119
+ 娓 -13.2145
3120
+ 婪 -13.2145
3121
+ 婿 -13.2145
3122
+ 媳 -13.2145
3123
+ 嬋 -13.2145
3124
+ 屑 -13.2145
3125
+ 崑 -13.2145
3126
+ 庶 -13.2145
3127
+ 廿 -13.2145
3128
+ 弔 -13.2145
3129
+ 惰 -13.2145
3130
+ 懇 -13.2145
3131
+ 拷 -13.2145
3132
+ 掠 -13.2145
3133
+ 揣 -13.2145
3134
+ 摒 -13.2145
3135
+ 攪 -13.2145
3136
+ 暇 -13.2145
3137
+ 曦 -13.2145
3138
+ 杖 -13.2145
3139
+ 枋 -13.2145
3140
+ 枕 -13.2145
3141
+ 棉 -13.2145
3142
+ 楓 -13.2145
3143
+ 榴 -13.2145
3144
+ 櫻 -13.2145
3145
+ 欄 -13.2145
3146
+ 泄 -13.2145
3147
+ 渥 -13.2145
3148
+ 滌 -13.2145
3149
+ 熙 -13.2145
3150
+ 燬 -13.2145
3151
+ 狒 -13.2145
3152
+ 狩 -13.2145
3153
+ 獗 -13.2145
3154
+ 琉 -13.2145
3155
+ 疹 -13.2145
3156
+ 痊 -13.2145
3157
+ 痰 -13.2145
3158
+ 瘍 -13.2145
3159
+ 矗 -13.2145
3160
+ 祂 -13.2145
3161
+ 穌 -13.2145
3162
+ 筱 -13.2145
3163
+ 絞 -13.2145
3164
+ 綽 -13.2145
3165
+ 腥 -13.2145
3166
+ 臆 -13.2145
3167
+ 茁 -13.2145
3168
+ 莞 -13.2145
3169
+ 蔔 -13.2145
3170
+ 蜓 -13.2145
3171
+ 蜥 -13.2145
3172
+ 蜻 -13.2145
3173
+ 衫 -13.2145
3174
+ 褐 -13.2145
3175
+ 諜 -13.2145
3176
+ 諺 -13.2145
3177
+ 讞 -13.2145
3178
+ 賂 -13.2145
3179
+ 賑 -13.2145
3180
+ 轍 -13.2145
3181
+ 迦 -13.2145
3182
+ 遼 -13.2145
3183
+ 遽 -13.2145
3184
+ 鋁 -13.2145
3185
+ 鎂 -13.2145
3186
+ 閥 -13.2145
3187
+ 陡 -13.2145
3188
+ 頤 -13.2145
3189
+ 餃 -13.2145
3190
+ 餡 -13.2145
3191
+ 饕 -13.2145
3192
+ 鬣 -13.2145
3193
+ 丑 -13.3969
3194
+ 丰 -13.3969
3195
+ 亢 -13.3969
3196
+ 体 -13.3969
3197
+ 佤 -13.3969
3198
+ 俯 -13.3969
3199
+ 兜 -13.3969
3200
+ 冉 -13.3969
3201
+ 刁 -13.3969
3202
+ 剖 -13.3969
3203
+ 匠 -13.3969
3204
+ 叭 -13.3969
3205
+ 吒 -13.3969
3206
+ 吟 -13.3969
3207
+ 哄 -13.3969
3208
+ 哩 -13.3969
3209
+ 啼 -13.3969
3210
+ 嗅 -13.3969
3211
+ 嗜 -13.3969
3212
+ 噁 -13.3969
3213
+ 嚨 -13.3969
3214
+ 囉 -13.3969
3215
+ 夭 -13.3969
3216
+ 娥 -13.3969
3217
+ 嫩 -13.3969
3218
+ 嬉 -13.3969
3219
+ 嬸 -13.3969
3220
+ 宛 -13.3969
3221
+ 寢 -13.3969
3222
+ 弧 -13.3969
3223
+ 彬 -13.3969
3224
+ 恫 -13.3969
3225
+ 愧 -13.3969
3226
+ 拱 -13.3969
3227
+ 掙 -13.3969
3228
+ 搐 -13.3969
3229
+ 摟 -13.3969
3230
+ 擲 -13.3969
3231
+ 斡 -13.3969
3232
+ 曖 -13.3969
3233
+ 杭 -13.3969
3234
+ 栓 -13.3969
3235
+ 梢 -13.3969
3236
+ 棍 -13.3969
3237
+ 毆 -13.3969
3238
+ 泣 -13.3969
3239
+ 湘 -13.3969
3240
+ 潢 -13.3969
3241
+ 瀏 -13.3969
3242
+ 瀧 -13.3969
3243
+ 燉 -13.3969
3244
+ 狙 -13.3969
3245
+ 珮 -13.3969
3246
+ 璉 -13.3969
3247
+ 畸 -13.3969
3248
+ 疸 -13.3969
3249
+ 痙 -13.3969
3250
+ 痴 -13.3969
3251
+ 皺 -13.3969
3252
+ 睫 -13.3969
3253
+ 磺 -13.3969
3254
+ 祁 -13.3969
3255
+ 秤 -13.3969
3256
+ 竣 -13.3969
3257
+ 筍 -13.3969
3258
+ 紜 -13.3969
3259
+ 羌 -13.3969
3260
+ 肘 -13.3969
3261
+ 舅 -13.3969
3262
+ 芥 -13.3969
3263
+ 荔 -13.3969
3264
+ 荻 -13.3969
3265
+ 萌 -13.3969
3266
+ 蒞 -13.3969
3267
+ 蔀 -13.3969
3268
+ 蕃 -13.3969
3269
+ 蕉 -13.3969
3270
+ 薛 -13.3969
3271
+ 蛻 -13.3969
3272
+ 蜴 -13.3969
3273
+ 蟬 -13.3969
3274
+ 蠶 -13.3969
3275
+ 誣 -13.3969
3276
+ 謾 -13.3969
3277
+ 譎 -13.3969
3278
+ 蹄 -13.3969
3279
+ 鑣 -13.3969
3280
+ 陲 -13.3969
3281
+ 鞍 -13.3969
3282
+ 韋 -13.3969
3283
+ 韻 -13.3969
3284
+ 飪 -13.3969
3285
+ 饗 -13.3969
3286
+ 馴 -13.3969
3287
+ 鬚 -13.3969
3288
+ 魁 -13.3969
3289
+ 鮭 -13.3969
3290
+ 鵑 -13.3969
3291
+ 乍 -13.62
3292
+ 亟 -13.62
3293
+ 佐 -13.62
3294
+ 侖 -13.62
3295
+ 俏 -13.62
3296
+ 倩 -13.62
3297
+ 僥 -13.62
3298
+ 凰 -13.62
3299
+ 刨 -13.62
3300
+ 吩 -13.62
3301
+ 呎 -13.62
3302
+ 咀 -13.62
3303
+ 咚 -13.62
3304
+ 咸 -13.62
3305
+ 噱 -13.62
3306
+ 囪 -13.62
3307
+ 圜 -13.62
3308
+ 奄 -13.62
3309
+ 嬴 -13.62
3310
+ 孵 -13.62
3311
+ 孺 -13.62
3312
+ 屹 -13.62
3313
+ 岌 -13.62
3314
+ 峨 -13.62
3315
+ 峭 -13.62
3316
+ 巽 -13.62
3317
+ 幌 -13.62
3318
+ 怯 -13.62
3319
+ 怵 -13.62
3320
+ 懿 -13.62
3321
+ 扔 -13.62
3322
+ 拇 -13.62
3323
+ 拌 -13.62
3324
+ 拙 -13.62
3325
+ 据 -13.62
3326
+ 掐 -13.62
3327
+ 搗 -13.62
3328
+ 摳 -13.62
3329
+ 擂 -13.62
3330
+ 攣 -13.62
3331
+ 斟 -13.62
3332
+ 柬 -13.62
3333
+ 柺 -13.62
3334
+ 柿 -13.62
3335
+ 梆 -13.62
3336
+ 棰 -13.62
3337
+ 榨 -13.62
3338
+ 榻 -13.62
3339
+ 槤 -13.62
3340
+ 歎 -13.62
3341
+ 氨 -13.62
3342
+ 氮 -13.62
3343
+ 汀 -13.62
3344
+ 汲 -13.62
3345
+ 沼 -13.62
3346
+ 浙 -13.62
3347
+ 浚 -13.62
3348
+ 涮 -13.62
3349
+ 湄 -13.62
3350
+ 湮 -13.62
3351
+ 溺 -13.62
3352
+ 滔 -13.62
3353
+ 潟 -13.62
3354
+ 澹 -13.62
3355
+ 瀑 -13.62
3356
+ 炙 -13.62
3357
+ 烙 -13.62
3358
+ 烽 -13.62
3359
+ 煎 -13.62
3360
+ 煒 -13.62
3361
+ 煥 -13.62
3362
+ 珣 -13.62
3363
+ 琢 -13.62
3364
+ 璧 -13.62
3365
+ 畝 -13.62
3366
+ 疤 -13.62
3367
+ 痘 -13.62
3368
+ 盔 -13.62
3369
+ 眩 -13.62
3370
+ 睦 -13.62
3371
+ 睽 -13.62
3372
+ 瞰 -13.62
3373
+ 矇 -13.62
3374
+ 磅 -13.62
3375
+ 窺 -13.62
3376
+ 筠 -13.62
3377
+ 筷 -13.62
3378
+ 粱 -13.62
3379
+ 糯 -13.62
3380
+ 絨 -13.62
3381
+ 緋 -13.62
3382
+ 縛 -13.62
3383
+ 繡 -13.62
3384
+ 芸 -13.62
3385
+ 苑 -13.62
3386
+ 苓 -13.62
3387
+ 荳 -13.62
3388
+ 莘 -13.62
3389
+ 蓉 -13.62
3390
+ 蕈 -13.62
3391
+ 蕨 -13.62
3392
+ 蕪 -13.62
3393
+ 虱 -13.62
3394
+ 蠢 -13.62
3395
+ 裴 -13.62
3396
+ 覈 -13.62
3397
+ 訶 -13.62
3398
+ 譚 -13.62
3399
+ 豁 -13.62
3400
+ 豎 -13.62
3401
+ 賊 -13.62
3402
+ 賤 -13.62
3403
+ 贅 -13.62
3404
+ 跋 -13.62
3405
+ 蹋 -13.62
3406
+ 蹦 -13.62
3407
+ 蹺 -13.62
3408
+ 躂 -13.62
3409
+ 躪 -13.62
3410
+ 軀 -13.62
3411
+ 遂 -13.62
3412
+ 錨 -13.62
3413
+ 鍚 -13.62
3414
+ 閡 -13.62
3415
+ 頰 -13.62
3416
+ 馭 -13.62
3417
+ 駱 -13.62
3418
+ 驥 -13.62
3419
+ 魘 -13.62
3420
+ 鯉 -13.62
3421
+ 鶩 -13.62
3422
+ 仕 -13.9077
3423
+ 倘 -13.9077
3424
+ 倦 -13.9077
3425
+ 儂 -13.9077
3426
+ 冀 -13.9077
3427
+ 剃 -13.9077
3428
+ 剌 -13.9077
3429
+ 匍 -13.9077
3430
+ 匐 -13.9077
3431
+ 匣 -13.9077
3432
+ 匱 -13.9077
3433
+ 厄 -13.9077
3434
+ 叱 -13.9077
3435
+ 吝 -13.9077
3436
+ 咱 -13.9077
3437
+ 哮 -13.9077
3438
+ 唏 -13.9077
3439
+ 唾 -13.9077
3440
+ 喋 -13.9077
3441
+ 嗑 -13.9077
3442
+ 嗦 -13.9077
3443
+ 嗨 -13.9077
3444
+ 嘎 -13.9077
3445
+ 噢 -13.9077
3446
+ 嚷 -13.9077
3447
+ 囑 -13.9077
3448
+ 塭 -13.9077
3449
+ 壅 -13.9077
3450
+ 奚 -13.9077
3451
+ 姪 -13.9077
3452
+ 媚 -13.9077
3453
+ 寥 -13.9077
3454
+ 尉 -13.9077
3455
+ 尹 -13.9077
3456
+ 岐 -13.9077
3457
+ 崔 -13.9077
3458
+ 崛 -13.9077
3459
+ 嶇 -13.9077
3460
+ 嶽 -13.9077
3461
+ 巔 -13.9077
3462
+ 弩 -13.9077
3463
+ 彪 -13.9077
3464
+ 御 -13.9077
3465
+ 徨 -13.9077
3466
+ 恕 -13.9077
3467
+ 恥 -13.9077
3468
+ 悖 -13.9077
3469
+ 慚 -13.9077
3470
+ 憧 -13.9077
3471
+ 憬 -13.9077
3472
+ 戕 -13.9077
3473
+ 扥 -13.9077
3474
+ 拐 -13.9077
3475
+ 摺 -13.9077
3476
+ 攘 -13.9077
3477
+ 晞 -13.9077
3478
+ 晟 -13.9077
3479
+ 晾 -13.9077
3480
+ 曄 -13.9077
3481
+ 曳 -13.9077
3482
+ 梗 -13.9077
3483
+ 梳 -13.9077
3484
+ 棕 -13.9077
3485
+ 棠 -13.9077
3486
+ 棧 -13.9077
3487
+ 楠 -13.9077
3488
+ 榭 -13.9077
3489
+ 榷 -13.9077
3490
+ 樟 -13.9077
3491
+ 橢 -13.9077
3492
+ 殉 -13.9077
3493
+ 浬 -13.9077
3494
+ 涕 -13.9077
3495
+ 淒 -13.9077
3496
+ 淫 -13.9077
3497
+ 淮 -13.9077
3498
+ 渲 -13.9077
3499
+ 湍 -13.9077
3500
+ 溥 -13.9077
3501
+ 漥 -13.9077
3502
+ 濂 -13.9077
3503
+ 濛 -13.9077
3504
+ 灸 -13.9077
3505
+ 燮 -13.9077
3506
+ 爍 -13.9077
3507
+ 牟 -13.9077
3508
+ 狠 -13.9077
3509
+ 璀 -13.9077
3510
+ 璇 -13.9077
3511
+ 瓣 -13.9077
3512
+ 疚 -13.9077
3513
+ 痞 -13.9077
3514
+ 痧 -13.9077
3515
+ 瘟 -13.9077
3516
+ 皎 -13.9077
3517
+ 皿 -13.9077
3518
+ 睬 -13.9077
3519
+ 礡 -13.9077
3520
+ 祺 -13.9077
3521
+ 秧 -13.9077
3522
+ 笆 -13.9077
3523
+ 筊 -13.9077
3524
+ 筏 -13.9077
3525
+ 箋 -13.9077
3526
+ 箴 -13.9077
3527
+ 簣 -13.9077
3528
+ 糜 -13.9077
3529
+ 紳 -13.9077
3530
+ 絃 -13.9077
3531
+ 緯 -13.9077
3532
+ 縝 -13.9077
3533
+ 繭 -13.9077
3534
+ 纜 -13.9077
3535
+ 羶 -13.9077
3536
+ 羸 -13.9077
3537
+ 耿 -13.9077
3538
+ 芙 -13.9077
3539
+ 苞 -13.9077
3540
+ 苟 -13.9077
3541
+ 茍 -13.9077
3542
+ 茸 -13.9077
3543
+ 莠 -13.9077
3544
+ 葫 -13.9077
3545
+ 蓓 -13.9077
3546
+ 蔑 -13.9077
3547
+ 蔥 -13.9077
3548
+ 蚩 -13.9077
3549
+ 蜀 -13.9077
3550
+ 螂 -13.9077
3551
+ 衪 -13.9077
3552
+ 袒 -13.9077
3553
+ 褒 -13.9077
3554
+ 褘 -13.9077
3555
+ 褥 -13.9077
3556
+ 覦 -13.9077
3557
+ 覬 -13.9077
3558
+ 誨 -13.9077
3559
+ 諉 -13.9077
3560
+ 謁 -13.9077
3561
+ 譁 -13.9077
3562
+ 豫 -13.9077
3563
+ 貳 -13.9077
3564
+ 跪 -13.9077
3565
+ 踰 -13.9077
3566
+ 蹂 -13.9077
3567
+ 蹊 -13.9077
3568
+ 輓 -13.9077
3569
+ 迺 -13.9077
3570
+ 逍 -13.9077
3571
+ 遨 -13.9077
3572
+ 酋 -13.9077
3573
+ 醮 -13.9077
3574
+ 釧 -13.9077
3575
+ 銬 -13.9077
3576
+ 錡 -13.9077
3577
+ 錮 -13.9077
3578
+ 頹 -13.9077
3579
+ 餌 -13.9077
3580
+ 駝 -13.9077
3581
+ 骷 -13.9077
3582
+ 髏 -13.9077
3583
+ 鱷 -13.9077
3584
+ 鱺 -13.9077
3585
+ 鴛 -13.9077
3586
+ 鴿 -13.9077
3587
+ 鷥 -13.9077
3588
+ 黜 -13.9077
3589
+ 乞 -14.3132
3590
+ 仆 -14.3132
3591
+ 佬 -14.3132
3592
+ 佷 -14.3132
3593
+ 佼 -14.3132
3594
+ 俞 -14.3132
3595
+ 儒 -14.3132
3596
+ 冕 -14.3132
3597
+ 凳 -14.3132
3598
+ 刃 -14.3132
3599
+ 剋 -14.3132
3600
+ 剿 -14.3132
3601
+ 勻 -14.3132
3602
+ 匈 -14.3132
3603
+ 匝 -14.3132
3604
+ 卒 -14.3132
3605
+ 吁 -14.3132
3606
+ 吆 -14.3132
3607
+ 吠 -14.3132
3608
+ 吶 -14.3132
3609
+ 吾 -14.3132
3610
+ 呱 -14.3132
3611
+ 咄 -14.3132
3612
+ 咆 -14.3132
3613
+ 咒 -14.3132
3614
+ 咫 -14.3132
3615
+ 唆 -14.3132
3616
+ 喲 -14.3132
3617
+ 喳 -14.3132
3618
+ 嗆 -14.3132
3619
+ 嗇 -14.3132
3620
+ 嗚 -14.3132
3621
+ 嘖 -14.3132
3622
+ 嘩 -14.3132
3623
+ 嘰 -14.3132
3624
+ 嚀 -14.3132
3625
+ 坎 -14.3132
3626
+ 坨 -14.3132
3627
+ 垣 -14.3132
3628
+ 埠 -14.3132
3629
+ 堯 -14.3132
3630
+ 塹 -14.3132
3631
+ 壹 -14.3132
3632
+ 奴 -14.3132
3633
+ 妄 -14.3132
3634
+ 姣 -14.3132
3635
+ 姮 -14.3132
3636
+ 娣 -14.3132
3637
+ 婁 -14.3132
3638
+ 媲 -14.3132
3639
+ 嫂 -14.3132
3640
+ 嫡 -14.3132
3641
+ 孰 -14.3132
3642
+ 屎 -14.3132
3643
+ 峴 -14.3132
3644
+ 帷 -14.3132
3645
+ 廓 -14.3132
3646
+ 徬 -14.3132
3647
+ 忪 -14.3132
3648
+ 忿 -14.3132
3649
+ 恃 -14.3132
3650
+ 恙 -14.3132
3651
+ 恣 -14.3132
3652
+ 恿 -14.3132
3653
+ 惺 -14.3132
3654
+ 愿 -14.3132
3655
+ 慫 -14.3132
3656
+ 戊 -14.3132
3657
+ 戮 -14.3132
3658
+ 扈 -14.3132
3659
+ 拈 -14.3132
3660
+ 拎 -14.3132
3661
+ 挶 -14.3132
3662
+ 捅 -14.3132
3663
+ 捩 -14.3132
3664
+ 揀 -14.3132
3665
+ 揪 -14.3132
3666
+ 搤 -14.3132
3667
+ 摀 -14.3132
3668
+ 摜 -14.3132
3669
+ 摯 -14.3132
3670
+ 撩 -14.3132
3671
+ 撻 -14.3132
3672
+ 擒 -14.3132
3673
+ 擷 -14.3132
3674
+ 擻 -14.3132
3675
+ 晝 -14.3132
3676
+ 暄 -14.3132
3677
+ 曇 -14.3132
3678
+ 朔 -14.3132
3679
+ 枷 -14.3132
3680
+ 柄 -14.3132
3681
+ 柚 -14.3132
3682
+ 榆 -14.3132
3683
+ 槳 -14.3132
3684
+ 殞 -14.3132
3685
+ 沌 -14.3132
3686
+ 洄 -14.3132
3687
+ 浹 -14.3132
3688
+ 涅 -14.3132
3689
+ 涸 -14.3132
3690
+ 淦 -14.3132
3691
+ 淳 -14.3132
3692
+ 滬 -14.3132
3693
+ 漣 -14.3132
3694
+ 漧 -14.3132
3695
+ 漪 -14.3132
3696
+ 漱 -14.3132
3697
+ 漳 -14.3132
3698
+ 漾 -14.3132
3699
+ 澐 -14.3132
3700
+ 瀚 -14.3132
3701
+ 瀟 -14.3132
3702
+ 灶 -14.3132
3703
+ 炊 -14.3132
3704
+ 炯 -14.3132
3705
+ 烊 -14.3132
3706
+ 焉 -14.3132
3707
+ 焙 -14.3132
3708
+ 牴 -14.3132
3709
+ 犁 -14.3132
3710
+ 猥 -14.3132
3711
+ 玨 -14.3132
3712
+ 珈 -14.3132
3713
+ 琅 -14.3132
3714
+ 琨 -14.3132
3715
+ 瑩 -14.3132
3716
+ 瑾 -14.3132
3717
+ 璨 -14.3132
3718
+ 璽 -14.3132
3719
+ 瓢 -14.3132
3720
+ 甥 -14.3132
3721
+ 甯 -14.3132
3722
+ 疇 -14.3132
3723
+ 疣 -14.3132
3724
+ 痍 -14.3132
3725
+ 皚 -14.3132
3726
+ 盬 -14.3132
3727
+ 眈 -14.3132
3728
+ 瞌 -14.3132
3729
+ 瞥 -14.3132
3730
+ 瞪 -14.3132
3731
+ 瞳 -14.3132
3732
+ 矓 -14.3132
3733
+ 祟 -14.3132
3734
+ 祠 -14.3132
3735
+ 稠 -14.3132
3736
+ 笠 -14.3132
3737
+ 箔 -14.3132
3738
+ 篙 -14.3132
3739
+ 篷 -14.3132
3740
+ 簷 -14.3132
3741
+ 簸 -14.3132
3742
+ 粥 -14.3132
3743
+ 粼 -14.3132
3744
+ 糰 -14.3132
3745
+ 綺 -14.3132
3746
+ 纍 -14.3132
3747
+ 羹 -14.3132
3748
+ 翦 -14.3132
3749
+ 翱 -14.3132
3750
+ 翹 -14.3132
3751
+ 胯 -14.3132
3752
+ 腴 -14.3132
3753
+ 膝 -14.3132
3754
+ 膾 -14.3132
3755
+ 臀 -14.3132
3756
+ 舢 -14.3132
3757
+ 芃 -14.3132
3758
+ 芎 -14.3132
3759
+ 芯 -14.3132
3760
+ 芻 -14.3132
3761
+ 苔 -14.3132
3762
+ 苯 -14.3132
3763
+ 苳 -14.3132
3764
+ 莆 -14.3132
3765
+ 莓 -14.3132
3766
+ 菩 -14.3132
3767
+ 蒴 -14.3132
3768
+ 蓍 -14.3132
3769
+ 蚤 -14.3132
3770
+ 蜒 -14.3132
3771
+ 蜿 -14.3132
3772
+ 螳 -14.3132
3773
+ 袂 -14.3132
3774
+ 褫 -14.3132
3775
+ 褻 -14.3132
3776
+ 訛 -14.3132
3777
+ 詔 -14.3132
3778
+ 詛 -14.3132
3779
+ 詼 -14.3132
3780
+ 誡 -14.3132
3781
+ 謐 -14.3132
3782
+ 謢 -14.3132
3783
+ 譏 -14.3132
3784
+ 跤 -14.3132
3785
+ 蹶 -14.3132
3786
+ 轅 -14.3132
3787
+ 轆 -14.3132
3788
+ 迥 -14.3132
3789
+ 迭 -14.3132
3790
+ 遁 -14.3132
3791
+ 遑 -14.3132
3792
+ 邃 -14.3132
3793
+ 鄙 -14.3132
3794
+ 酢 -14.3132
3795
+ 醃 -14.3132
3796
+ 醋 -14.3132
3797
+ 醯 -14.3132
3798
+ 釗 -14.3132
3799
+ 釜 -14.3132
3800
+ 釦 -14.3132
3801
+ 錕 -14.3132
3802
+ 鍘 -14.3132
3803
+ 鍥 -14.3132
3804
+ 鍬 -14.3132
3805
+ 鎊 -14.3132
3806
+ 鎩 -14.3132
3807
+ 鏟 -14.3132
3808
+ 鏤 -14.3132
3809
+ 鐮 -14.3132
3810
+ 鑲 -14.3132
3811
+ 閻 -14.3132
3812
+ 闌 -14.3132
3813
+ 闕 -14.3132
3814
+ 阪 -14.3132
3815
+ 陜 -14.3132
3816
+ 霄 -14.3132
3817
+ 霆 -14.3132
3818
+ 霹 -14.3132
3819
+ 靶 -14.3132
3820
+ 餞 -14.3132
3821
+ 饑 -14.3132
3822
+ 騁 -14.3132
3823
+ 髦 -14.3132
3824
+ 鬍 -14.3132
3825
+ 魷 -14.3132
3826
+ 鯖 -14.3132
3827
+ 鯛 -14.3132
3828
+ 鴦 -14.3132
3829
+ 麒 -14.3132
3830
+ 齦 -14.3132
3831
+ 齲 -14.3132
3832
+ 丐 -15.0063
3833
+ 丕 -15.0063
3834
+ 乒 -15.0063
3835
+ 乓 -15.0063
3836
+ 仄 -15.0063
3837
+ 伎 -15.0063
3838
+ 伕 -15.0063
3839
+ 佃 -15.0063
3840
+ 佇 -15.0063
3841
+ 侈 -15.0063
3842
+ 侑 -15.0063
3843
+ 俑 -15.0063
3844
+ 倔 -15.0063
3845
+ 倣 -15.0063
3846
+ 傀 -15.0063
3847
+ 僈 -15.0063
3848
+ 兀 -15.0063
3849
+ 冑 -15.0063
3850
+ 剁 -15.0063
3851
+ 剉 -15.0063
3852
+ 匕 -15.0063
3853
+ 厥 -15.0063
3854
+ 叟 -15.0063
3855
+ 叼 -15.0063
3856
+ 吏 -15.0063
3857
+ 呦 -15.0063
3858
+ 咋 -15.0063
3859
+ 咐 -15.0063
3860
+ 咨 -15.0063
3861
+ 咪 -15.0063
3862
+ 哦 -15.0063
3863
+ 哽 -15.0063
3864
+ 唄 -15.0063
3865
+ 唬 -15.0063
3866
+ 啃 -15.0063
3867
+ 啣 -15.0063
3868
+ 啥 -15.0063
3869
+ 啪 -15.0063
3870
+ 喵 -15.0063
3871
+ 嗄 -15.0063
3872
+ 嘯 -15.0063
3873
+ 嘶 -15.0063
3874
+ 噗 -15.0063
3875
+ 噩 -15.0063
3876
+ 噬 -15.0063
3877
+ 嚓 -15.0063
3878
+ 囍 -15.0063
3879
+ 圴 -15.0063
3880
+ 埸 -15.0063
3881
+ 壑 -15.0063
3882
+ 奎 -15.0063
3883
+ 奐 -15.0063
3884
+ 奘 -15.0063
3885
+ 妊 -15.0063
3886
+ 妖 -15.0063
3887
+ 妾 -15.0063
3888
+ 娠 -15.0063
3889
+ 婀 -15.0063
3890
+ 婓 -15.0063
3891
+ 嫚 -15.0063
3892
+ 嫣 -15.0063
3893
+ 嬤 -15.0063
3894
+ 孿 -15.0063
3895
+ 寇 -15.0063
3896
+ 寐 -15.0063
3897
+ 屜 -15.0063
3898
+ 岔 -15.0063
3899
+ 崁 -15.0063
3900
+ 崢 -15.0063
3901
+ 嵋 -15.0063
3902
+ 嵩 -15.0063
3903
+ 嶸 -15.0063
3904
+ 巍 -15.0063
3905
+ 帑 -15.0063
3906
+ 帘 -15.0063
3907
+ 帚 -15.0063
3908
+ 帛 -15.0063
3909
+ 幔 -15.0063
3910
+ 幗 -15.0063
3911
+ 幛 -15.0063
3912
+ 幡 -15.0063
3913
+ 廬 -15.0063
3914
+ 弈 -15.0063
3915
+ 弛 -15.0063
3916
+ 彊 -15.0063
3917
+ 彤 -15.0063
3918
+ 徇 -15.0063
3919
+ 徉 -15.0063
3920
+ 徜 -15.0063
3921
+ 忐 -15.0063
3922
+ 忑 -15.0063
3923
+ 悍 -15.0063
3924
+ 悚 -15.0063
3925
+ 悱 -15.0063
3926
+ 悴 -15.0063
3927
+ 悸 -15.0063
3928
+ 惇 -15.0063
3929
+ 惘 -15.0063
3930
+ 惟 -15.0063
3931
+ 惻 -15.0063
3932
+ 愜 -15.0063
3933
+ 愫 -15.0063
3934
+ 愷 -15.0063
3935
+ 憊 -15.0063
3936
+ 憔 -15.0063
3937
+ 懦 -15.0063
3938
+ 懵 -15.0063
3939
+ 懾 -15.0063
3940
+ 戡 -15.0063
3941
+ 戩 -15.0063
3942
+ 扉 -15.0063
3943
+ 扺 -15.0063
3944
+ 抉 -15.0063
3945
+ 拄 -15.0063
3946
+ 拴 -15.0063
3947
+ 捆 -15.0063
3948
+ 捺 -15.0063
3949
+ 捻 -15.0063
3950
+ 掬 -15.0063
3951
+ 掰 -15.0063
3952
+ 揍 -15.0063
3953
+ 揹 -15.0063
3954
+ 搆 -15.0063
3955
+ 搥 -15.0063
3956
+ 攢 -15.0063
3957
+ 攫 -15.0063
3958
+ 昀 -15.0063
3959
+ 晦 -15.0063
3960
+ 暉 -15.0063
3961
+ 暱 -15.0063
3962
+ 曩 -15.0063
3963
+ 朮 -15.0063
3964
+ 杆 -15.0063
3965
+ 杏 -15.0063
3966
+ 杓 -15.0063
3967
+ 杞 -15.0063
3968
+ 杵 -15.0063
3969
+ 枸 -15.0063
3970
+ 桔 -15.0063
3971
+ 椪 -15.0063
3972
+ 楞 -15.0063
3973
+ 楨 -15.0063
3974
+ 楷 -15.0063
3975
+ 楸 -15.0063
3976
+ 楹 -15.0063
3977
+ 榖 -15.0063
3978
+ 槃 -15.0063
3979
+ 槌 -15.0063
3980
+ 槲 -15.0063
3981
+ 樵 -15.0063
3982
+ 橇 -15.0063
3983
+ 檀 -15.0063
3984
+ 檬 -15.0063
3985
+ 檸 -15.0063
3986
+ 欷 -15.0063
3987
+ 殆 -15.0063
3988
+ 殭 -15.0063
3989
+ 殲 -15.0063
3990
+ 毋 -15.0063
3991
+ 毌 -15.0063
3992
+ 毓 -15.0063
3993
+ 氐 -15.0063
3994
+ 氦 -15.0063
3995
+ 汎 -15.0063
3996
+ 沚 -15.0063
3997
+ 泓 -15.0063
3998
+ 洒 -15.0063
3999
+ 浥 -15.0063
4000
+ 涇 -15.0063
4001
+ 涎 -15.0063
4002
+ 涷 -15.0063
4003
+ 淀 -15.0063
4004
+ 淅 -15.0063
4005
+ 渙 -15.0063
4006
+ 渝 -15.0063
4007
+ 渭 -15.0063
4008
+ 湃 -15.0063
4009
+ 湎 -15.0063
4010
+ 滸 -15.0063
4011
+ 漕 -15.0063
4012
+ 濘 -15.0063
4013
+ 濺 -15.0063
4014
+ 瀝 -15.0063
4015
+ 炘 -15.0063
4016
+ 炬 -15.0063
4017
+ 炷 -15.0063
4018
+ 焗 -15.0063
4019
+ 煽 -15.0063
4020
+ 熨 -15.0063
4021
+ 燁 -15.0063
4022
+ 燜 -15.0063
4023
+ 燻 -15.0063
4024
+ 燼 -15.0063
4025
+ 爹 -15.0063
4026
+ 爻 -15.0063
4027
+ 牒 -15.0063
4028
+ 犀 -15.0063
4029
+ 犢 -15.0063
4030
+ 狐 -15.0063
4031
+ 狡 -15.0063
4032
+ 狸 -15.0063
4033
+ 猝 -15.0063
4034
+ 猷 -15.0063
4035
+ 猾 -15.0063
4036
+ 獼 -15.0063
4037
+ 玥 -15.0063
4038
+ 玼 -15.0063
4039
+ 珀 -15.0063
4040
+ 琥 -15.0063
4041
+ 琺 -15.0063
4042
+ 瑙 -15.0063
4043
+ 璟 -15.0063
4044
+ 瓅 -15.0063
4045
+ 瓔 -15.0063
4046
+ 甪 -15.0063
4047
+ 痠 -15.0063
4048
+ 痲 -15.0063
4049
+ 痹 -15.0063
4050
+ 瘀 -15.0063
4051
+ 瘠 -15.0063
4052
+ 癇 -15.0063
4053
+ 癲 -15.0063
4054
+ 皓 -15.0063
4055
+ 盎 -15.0063
4056
+ 盥 -15.0063
4057
+ 矣 -15.0063
4058
+ 硝 -15.0063
4059
+ 硨 -15.0063
4060
+ 硼 -15.0063
4061
+ 碇 -15.0063
4062
+ 碘 -15.0063
4063
+ 碾 -15.0063
4064
+ ��� -15.0063
4065
+ 磲 -15.0063
4066
+ 磷 -15.0063
4067
+ 祇 -15.0063
4068
+ 秏 -15.0063
4069
+ 穢 -15.0063
4070
+ 穹 -15.0063
4071
+ 窈 -15.0063
4072
+ 窕 -15.0063
4073
+ 窖 -15.0063
4074
+ 窪 -15.0063
4075
+ 窿 -15.0063
4076
+ 竽 -15.0063
4077
+ 笈 -15.0063
4078
+ 笙 -15.0063
4079
+ 筐 -15.0063
4080
+ 筑 -15.0063
4081
+ 箇 -15.0063
4082
+ 箕 -15.0063
4083
+ 簧 -15.0063
4084
+ 簪 -15.0063
4085
+ 籟 -15.0063
4086
+ 籮 -15.0063
4087
+ 籽 -15.0063
4088
+ 粲 -15.0063
4089
+ 糠 -15.0063
4090
+ 糢 -15.0063
4091
+ 紆 -15.0063
4092
+ 紉 -15.0063
4093
+ 紕 -15.0063
4094
+ 絀 -15.0063
4095
+ 絆 -15.0063
4096
+ 絢 -15.0063
4097
+ 綏 -15.0063
4098
+ 緘 -15.0063
4099
+ 緞 -15.0063
4100
+ 繕 -15.0063
4101
+ 繚 -15.0063
4102
+ 缽 -15.0063
4103
+ 罔 -15.0063
4104
+ 罝 -15.0063
4105
+ 羔 -15.0063
4106
+ 羲 -15.0063
4107
+ 翎 -15.0063
4108
+ 肋 -15.0063
4109
+ 肴 -15.0063
4110
+ 胱 -15.0063
4111
+ 脘 -15.0063
4112
+ 脣 -15.0063
4113
+ 脯 -15.0063
4114
+ 腆 -15.0063
4115
+ 腋 -15.0063
4116
+ 腑 -15.0063
4117
+ 腱 -15.0063
4118
+ 臃 -15.0063
4119
+ 舀 -15.0063
4120
+ 舔 -15.0063
4121
+ 舺 -15.0063
4122
+ 艋 -15.0063
4123
+ 茉 -15.0063
4124
+ 茗 -15.0063
4125
+ 茯 -15.0063
4126
+ 荊 -15.0063
4127
+ 荖 -15.0063
4128
+ 莒 -15.0063
4129
+ 萁 -15.0063
4130
+ 蓀 -15.0063
4131
+ 蔭 -15.0063
4132
+ 蔽 -15.0063
4133
+ 薏 -15.0063
4134
+ 藪 -15.0063
4135
+ 蘑 -15.0063
4136
+ 蚪 -15.0063
4137
+ 蛉 -15.0063
4138
+ 蛹 -15.0063
4139
+ 蜍 -15.0063
4140
+ 蜷 -15.0063
4141
+ 蝌 -15.0063
4142
+ 蝸 -15.0063
4143
+ 螯 -15.0063
4144
+ 螽 -15.0063
4145
+ 蟑 -15.0063
4146
+ 蟾 -15.0063
4147
+ 蠅 -15.0063
4148
+ 蠆 -15.0063
4149
+ 蠋 -15.0063
4150
+ 蠕 -15.0063
4151
+ 蠣 -15.0063
4152
+ 衊 -15.0063
4153
+ 裘 -15.0063
4154
+ 褟 -15.0063
4155
+ 褵 -15.0063
4156
+ 褶 -15.0063
4157
+ 襌 -15.0063
4158
+ 襖 -15.0063
4159
+ 覲 -15.0063
4160
+ 訐 -15.0063
4161
+ 訖 -15.0063
4162
+ 訥 -15.0063
4163
+ 誅 -15.0063
4164
+ 諳 -15.0063
4165
+ 謔 -15.0063
4166
+ 譟 -15.0063
4167
+ 豌 -15.0063
4168
+ 豺 -15.0063
4169
+ 貽 -15.0063
4170
+ 賁 -15.0063
4171
+ 贍 -15.0063
4172
+ 跚 -15.0063
4173
+ 蹣 -15.0063
4174
+ 蹭 -15.0063
4175
+ 軋 -15.0063
4176
+ 迂 -15.0063
4177
+ 迄 -15.0063
4178
+ 遶 -15.0063
4179
+ 酚 -15.0063
4180
+ 酥 -15.0063
4181
+ 醱 -15.0063
4182
+ 釉 -15.0063
4183
+ 鈍 -15.0063
4184
+ 鈐 -15.0063
4185
+ 鈙 -15.0063
4186
+ 鈦 -15.0063
4187
+ 鈺 -15.0063
4188
+ 鉀 -15.0063
4189
+ 鉗 -15.0063
4190
+ 鋅 -15.0063
4191
+ 鋌 -15.0063
4192
+ 鋤 -15.0063
4193
+ 錏 -15.0063
4194
+ 錐 -15.0063
4195
+ 錘 -15.0063
4196
+ 錠 -15.0063
4197
+ 錳 -15.0063
4198
+ 鍍 -15.0063
4199
+ 鎘 -15.0063
4200
+ 鎳 -15.0063
4201
+ 鏘 -15.0063
4202
+ 鐓 -15.0063
4203
+ 鑄 -15.0063
4204
+ 鑾 -15.0063
4205
+ 闔 -15.0063
4206
+ 陂 -15.0063
4207
+ 陛 -15.0063
4208
+ 隄 -15.0063
4209
+ 隍 -15.0063
4210
+ 隕 -15.0063
4211
+ 隘 -15.0063
4212
+ 隴 -15.0063
4213
+ 隼 -15.0063
4214
+ 雌 -15.0063
4215
+ 靂 -15.0063
4216
+ 靛 -15.0063
4217
+ 靦 -15.0063
4218
+ 靨 -15.0063
4219
+ 鞦 -15.0063
4220
+ 韁 -15.0063
4221
+ 韆 -15.0063
4222
+ 韜 -15.0063
4223
+ 頡 -15.0063
4224
+ 顎 -15.0063
4225
+ 顴 -15.0063
4226
+ 颳 -15.0063
4227
+ 飴 -15.0063
4228
+ 餒 -15.0063
4229
+ 餔 -15.0063
4230
+ 餽 -15.0063
4231
+ 饌 -15.0063
4232
+ 駢 -15.0063
4233
+ 騖 -15.0063
4234
+ 驍 -15.0063
4235
+ 驛 -15.0063
4236
+ 驢 -15.0063
4237
+ 驪 -15.0063
4238
+ 骯 -15.0063
4239
+ 髖 -15.0063
4240
+ 鬢 -15.0063
4241
+ 鬩 -15.0063
4242
+ 鰭 -15.0063
4243
+ 鰲 -15.0063
4244
+ 鱒 -15.0063
4245
+ 鳧 -15.0063
4246
+ 鳩 -15.0063
4247
+ 鴆 -15.0063
4248
+ 鴯 -15.0063
4249
+ 鵜 -15.0063
4250
+ 鵡 -15.0063
4251
+ 鷿 -15.0063
4252
+ 鸚 -15.0063
4253
+ 鸞 -15.0063
4254
+ 黝 -15.0063
4255
+ 齪 -15.0063
4256
+ 齷 -15.0063
4257
+ 龕 -15.0063
ASR-model/tokenizer_seg_bpe5k_char/env.log ADDED
@@ -0,0 +1,195 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ SpeechBrain system description
2
+ ==============================
3
+ Python version:
4
+ 3.8.10 (default, Jun 2 2021, 10:49:15)
5
+ [GCC 9.4.0]
6
+ ==============================
7
+ Installed Python packages:
8
+ appdirs==1.4.4
9
+ argon2-cffi==20.1.0
10
+ async-generator==1.10
11
+ attrs==19.3.0
12
+ Automat==0.8.0
13
+ autopep8==1.5.7
14
+ backcall==0.2.0
15
+ backports.entry-points-selectable==1.1.0
16
+ black==19.10b0
17
+ bleach==3.3.1
18
+ blessings==1.7
19
+ blinker==1.4
20
+ bottle==0.12.19
21
+ certifi==2019.11.28
22
+ cffi==1.14.6
23
+ cfgv==3.3.0
24
+ chardet==3.0.4
25
+ Click==7.0
26
+ cloud-init==21.2
27
+ colorama==0.4.3
28
+ command-not-found==0.3
29
+ configobj==5.0.6
30
+ constantly==15.1.0
31
+ cryptography==2.8
32
+ cupshelpers==1.0
33
+ cycler==0.10.0
34
+ d2l==0.16.6
35
+ datasets==1.11.0
36
+ dbus-python==1.2.16
37
+ debugpy==1.3.0
38
+ decorator==5.0.9
39
+ defer==1.0.6
40
+ defusedxml==0.7.1
41
+ dill==0.3.4
42
+ distlib==0.3.2
43
+ distro==1.4.0
44
+ distro-info===0.23ubuntu1
45
+ entrypoints==0.3
46
+ filelock==3.0.12
47
+ flake8==3.7.9
48
+ fsspec==2021.7.0
49
+ gpustat==0.6.0
50
+ gpuview==0.4.0
51
+ httplib2==0.14.0
52
+ huggingface-hub==0.0.16
53
+ hyperlink==19.0.0
54
+ HyperPyYAML==1.0.0
55
+ identify==2.2.11
56
+ idna==2.8
57
+ importlib-metadata==1.5.0
58
+ incremental==16.10.1
59
+ ipykernel==6.0.2
60
+ ipython==7.25.0
61
+ ipython-genutils==0.2.0
62
+ ipywidgets==7.6.3
63
+ jedi==0.18.0
64
+ Jinja2==2.10.1
65
+ joblib==1.0.1
66
+ jsonpatch==1.22
67
+ jsonpointer==2.0
68
+ jsonschema==3.2.0
69
+ jupyter==1.0.0
70
+ jupyter-client==6.1.12
71
+ jupyter-console==6.4.0
72
+ jupyter-core==4.7.1
73
+ jupyterlab-pygments==0.1.2
74
+ jupyterlab-widgets==1.0.0
75
+ keyring==18.0.1
76
+ kiwisolver==1.3.1
77
+ language-selector==0.1
78
+ launchpadlib==1.10.13
79
+ lazr.restfulclient==0.14.2
80
+ lazr.uri==1.0.3
81
+ macaroonbakery==1.3.1
82
+ MarkupSafe==1.1.0
83
+ matplotlib==3.4.2
84
+ matplotlib-inline==0.1.2
85
+ mccabe==0.6.1
86
+ mistune==0.8.4
87
+ more-itertools==4.2.0
88
+ multiprocess==0.70.12.2
89
+ nbclient==0.5.3
90
+ nbconvert==6.1.0
91
+ nbformat==5.1.3
92
+ nest-asyncio==1.5.1
93
+ netifaces==0.10.4
94
+ nodeenv==1.6.0
95
+ notebook==6.4.0
96
+ numpy==1.21.2
97
+ nvidia-ml-py3==7.352.0
98
+ oauthlib==3.1.0
99
+ packaging==21.0
100
+ pandas==1.3.0
101
+ pandocfilters==1.4.3
102
+ parso==0.8.2
103
+ pathspec==0.9.0
104
+ pexpect==4.6.0
105
+ pickleshare==0.7.5
106
+ Pillow==8.3.1
107
+ platformdirs==2.0.2
108
+ pluggy==0.13.1
109
+ pre-commit==2.15.0
110
+ prometheus-client==0.11.0
111
+ prompt-toolkit==3.0.19
112
+ protobuf==3.6.1
113
+ psutil==5.8.0
114
+ ptyprocess==0.7.0
115
+ py==1.10.0
116
+ pyarrow==5.0.0
117
+ pyasn1==0.4.2
118
+ pyasn1-modules==0.2.1
119
+ pycairo==1.16.2
120
+ pycodestyle==2.5.0
121
+ pycparser==2.20
122
+ pycups==1.9.73
123
+ pyflakes==2.1.1
124
+ Pygments==2.9.0
125
+ PyGObject==3.36.0
126
+ PyHamcrest==1.9.0
127
+ PyJWT==1.7.1
128
+ pymacaroons==0.13.0
129
+ PyMySQL==1.0.2
130
+ PyNaCl==1.3.0
131
+ pyOpenSSL==19.0.0
132
+ pyparsing==2.4.7
133
+ pyRFC3339==1.1
134
+ pyrsistent==0.15.5
135
+ pyserial==3.4
136
+ pytest==5.4.1
137
+ python-apt==2.0.0+ubuntu0.20.4.5
138
+ python-dateutil==2.8.2
139
+ python-debian===0.1.36ubuntu1
140
+ pytube==10.9.3
141
+ pytz==2019.3
142
+ PyYAML==5.3.1
143
+ pyzmq==22.1.0
144
+ qtconsole==5.1.1
145
+ QtPy==1.9.0
146
+ regex==2021.7.6
147
+ requests==2.22.0
148
+ requests-unixsocket==0.2.0
149
+ ruamel.yaml==0.17.10
150
+ ruamel.yaml.clib==0.2.6
151
+ scipy==1.7.1
152
+ screen-resolution-extra==0.0.0
153
+ SecretStorage==2.3.1
154
+ Send2Trash==1.7.1
155
+ sentencepiece==0.1.96
156
+ service-identity==18.1.0
157
+ simplejson==3.16.0
158
+ six==1.14.0
159
+ sos==4.1
160
+ -e git+https://github.com/speechbrain/speechbrain.git@1d194bfc51ae20b9e38596d220cdf0f4977e69de#egg=speechbrain
161
+ ssh-import-id==5.10
162
+ supervisor==4.1.0
163
+ systemd-python==234
164
+ terminado==0.10.1
165
+ testpath==0.5.0
166
+ toml==0.10.2
167
+ torch==1.8.1
168
+ torchaudio==0.8.1
169
+ torchvision==0.10.0
170
+ tornado==6.1
171
+ tqdm==4.62.2
172
+ traitlets==5.0.5
173
+ Twisted==18.9.0
174
+ typed-ast==1.4.3
175
+ typing-extensions==3.10.0.0
176
+ ubuntu-advantage-tools==27.2
177
+ ufw==0.36
178
+ unattended-upgrades==0.1
179
+ urllib3==1.25.8
180
+ virtualenv==20.6.0
181
+ wadllib==1.3.3
182
+ wcwidth==0.2.5
183
+ webencodings==0.5.1
184
+ widgetsnbextension==3.5.1
185
+ xkit==0.0.0
186
+ xxhash==2.0.2
187
+ yamllint==1.23.0
188
+ zipp==1.0.0
189
+ zope.interface==4.7.1
190
+ ==============================
191
+ Git revision:
192
+ e3e51338
193
+ ==============================
194
+ Cuda version:
195
+ 10.2
ASR-model/tokenizer_seg_bpe5k_char/hyperparams.yaml ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Generated 2021-10-04 from:
2
+ # /mnt/md0/user_wayne/speechbrain/recipes/MATBN/Tokenizer/hparams/tokenizer_seg_bpe5k_char.yaml
3
+ # yamllint disable
4
+ dataset_folder: /home/wayne/CORPUS/MATBN_SEG
5
+ prepare_folder: results/prepare_seg
6
+ output_folder: results/tokenizer_seg_bpe5k_char
7
+ keep_unk: false
8
+
9
+ token_type: char # ["unigram", "bpe", "char"]
10
+ token_output: 5000 # index(blank/eos/bos/unk) = 0
11
+ character_coverage: 1.0
12
+ annotation_read: transcription
13
+
14
+ train_json: results/prepare_seg/train.json
15
+ dev_json: results/prepare_seg/dev.json
16
+ eval_json: results/prepare_seg/eval.json
17
+ test_json: results/prepare_seg/test.json
18
+
19
+
20
+ tokenizer: !name:speechbrain.tokenizers.SentencePiece.SentencePiece
21
+ model_dir: results/tokenizer_seg_bpe5k_char
22
+ vocab_size: 5000
23
+ annotation_train: results/prepare_seg/train.json
24
+ annotation_read: transcription
25
+ model_type: char # ["unigram", "bpe", "char"]
26
+ character_coverage: 1.0
27
+ annotation_list_to_check: [results/prepare_seg/dev.json, results/prepare_seg/eval.json,
28
+ results/prepare_seg/test.json]
29
+ annotation_format: json
30
+ bos_id: 1
31
+ eos_id: 2
ASR-model/tokenizer_seg_bpe5k_char/log.txt ADDED
@@ -0,0 +1,1037 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2021-09-16 18:20:17,572 - speechbrain.core - INFO - Beginning experiment!
2
+ 2021-09-16 18:20:17,572 - speechbrain.core - INFO - Experiment folder: results/tokenizer_seg_bpe5k_char
3
+ 2021-09-16 18:20:17,773 - speechbrain.utils.superpowers - DEBUG - appdirs==1.4.4
4
+ argon2-cffi==20.1.0
5
+ async-generator==1.10
6
+ attrs==21.2.0
7
+ autopep8==1.5.7
8
+ backcall==0.2.0
9
+ backports.entry-points-selectable==1.1.0
10
+ black==19.10b0
11
+ bleach==3.3.1
12
+ certifi==2021.5.30
13
+ cffi==1.14.6
14
+ cfgv==3.3.0
15
+ charset-normalizer==2.0.4
16
+ click==8.0.1
17
+ cycler==0.10.0
18
+ d2l==0.16.6
19
+ datasets==1.11.0
20
+ debugpy==1.3.0
21
+ decorator==5.0.9
22
+ defusedxml==0.7.1
23
+ dill==0.3.4
24
+ distlib==0.3.2
25
+ entrypoints==0.3
26
+ filelock==3.0.12
27
+ flake8==3.7.9
28
+ fsspec==2021.7.0
29
+ huggingface-hub==0.0.16
30
+ HyperPyYAML==1.0.0
31
+ identify==2.2.11
32
+ idna==3.2
33
+ ipykernel==6.0.2
34
+ ipython==7.25.0
35
+ ipython-genutils==0.2.0
36
+ ipywidgets==7.6.3
37
+ jedi==0.18.0
38
+ joblib==1.0.1
39
+ jupyter==1.0.0
40
+ jupyter-client==6.1.12
41
+ jupyter-console==6.4.0
42
+ jupyter-core==4.7.1
43
+ jupyterlab-pygments==0.1.2
44
+ jupyterlab-widgets==1.0.0
45
+ kiwisolver==1.3.1
46
+ matplotlib==3.4.2
47
+ matplotlib-inline==0.1.2
48
+ mccabe==0.6.1
49
+ mistune==0.8.4
50
+ mkl-fft==1.3.0
51
+ mkl-random @ file:///tmp/build/80754af9/mkl_random_1626186064646/work
52
+ mkl-service==2.4.0
53
+ more-itertools==8.9.0
54
+ multiprocess==0.70.12.2
55
+ nbclient==0.5.3
56
+ nbconvert==6.1.0
57
+ nbformat==5.1.3
58
+ nest-asyncio==1.5.1
59
+ nodeenv==1.6.0
60
+ notebook==6.4.0
61
+ numpy==1.21.2
62
+ olefile @ file:///Users/ktietz/demo/mc3/conda-bld/olefile_1629805411829/work
63
+ packaging==21.0
64
+ pandas==1.3.0
65
+ pandocfilters==1.4.3
66
+ parso==0.8.2
67
+ pathspec==0.9.0
68
+ pickleshare==0.7.5
69
+ Pillow==8.3.1
70
+ platformdirs==2.0.2
71
+ pluggy==0.13.1
72
+ pre-commit==2.15.0
73
+ prometheus-client==0.11.0
74
+ prompt-toolkit==3.0.19
75
+ ptyprocess==0.7.0
76
+ py==1.10.0
77
+ pyarrow==5.0.0
78
+ pycodestyle==2.5.0
79
+ pycparser==2.20
80
+ pydub @ file:///home/conda/feedstock_root/build_artifacts/pydub_1615612442567/work
81
+ pyflakes==2.1.1
82
+ Pygments==2.9.0
83
+ pyparsing==2.4.7
84
+ pytest==5.4.1
85
+ python-dateutil==2.8.2
86
+ pytube==10.9.3
87
+ PyYAML==5.4.1
88
+ pyzmq==22.1.0
89
+ qtconsole==5.1.1
90
+ QtPy==1.9.0
91
+ regex==2021.7.6
92
+ requests==2.26.0
93
+ ruamel.yaml==0.17.10
94
+ ruamel.yaml.clib==0.2.6
95
+ scikit-learn @ file:///tmp/build/80754af9/scikit-learn_1621370412049/work
96
+ scipy==1.7.1
97
+ Send2Trash==1.7.1
98
+ sentencepiece==0.1.96
99
+ six @ file:///tmp/build/80754af9/six_1623709665295/work
100
+ -e git+https://github.com/speechbrain/speechbrain.git@2ec4839746970875fc763aa354c44a3356685ef6#egg=speechbrain
101
+ terminado==0.10.1
102
+ testpath==0.5.0
103
+ threadpoolctl @ file:///Users/ktietz/demo/mc3/conda-bld/threadpoolctl_1629802263681/work
104
+ toml==0.10.2
105
+ torch==1.8.1
106
+ torchaudio==0.8.1
107
+ torchvision==0.10.0
108
+ tornado==6.1
109
+ tqdm==4.62.2
110
+ traitlets==5.0.5
111
+ typed-ast==1.4.3
112
+ typing-extensions==3.10.0.0
113
+ urllib3==1.26.6
114
+ virtualenv==20.6.0
115
+ wcwidth==0.2.5
116
+ webencodings==0.5.1
117
+ widgetsnbextension==3.5.1
118
+ xxhash==2.0.2
119
+ yamllint==1.23.0
120
+
121
+
122
+ 2021-09-16 18:20:17,777 - speechbrain.utils.superpowers - DEBUG - e3e51338
123
+
124
+
125
+ 2021-09-16 18:20:19,582 - speechbrain.tokenizers.SentencePiece - INFO - Train tokenizer with type:char
126
+ 2021-09-16 18:20:19,582 - speechbrain.tokenizers.SentencePiece - INFO - Extract transcription sequences from:results/prepare_seg/train.json
127
+ 2021-09-16 18:20:19,788 - speechbrain.tokenizers.SentencePiece - INFO - Text file created at: results/prepare_seg/train.txt
128
+ 2021-09-16 18:20:20,080 - speechbrain.tokenizers.SentencePiece - INFO - ==== Loading Tokenizer ===
129
+ 2021-09-16 18:20:20,080 - speechbrain.tokenizers.SentencePiece - INFO - Tokenizer path: results/tokenizer_seg_bpe5k_char/5000_char.model
130
+ 2021-09-16 18:20:20,080 - speechbrain.tokenizers.SentencePiece - INFO - Tokenizer vocab_size: 5000
131
+ 2021-09-16 18:20:20,080 - speechbrain.tokenizers.SentencePiece - INFO - Tokenizer type: char
132
+ 2021-09-16 18:20:20,082 - speechbrain.tokenizers.SentencePiece - INFO - ==== Accuracy checking for recovering text from tokenizer ===
133
+ 2021-09-16 18:20:32,793 - speechbrain.tokenizers.SentencePiece - INFO - recover words from: results/prepare_seg/dev.json
134
+ 2021-09-16 18:20:32,793 - speechbrain.tokenizers.SentencePiece - INFO - Wrong recover words: 0
135
+ 2021-09-16 18:20:32,793 - speechbrain.tokenizers.SentencePiece - WARNING - accuracy recovering words: 1.0
136
+ 2021-09-16 18:20:32,793 - speechbrain.tokenizers.SentencePiece - INFO - ==== Accuracy checking for recovering text from tokenizer ===
137
+ 2021-09-16 18:20:45,713 - speechbrain.tokenizers.SentencePiece - INFO - recover words from: results/prepare_seg/eval.json
138
+ 2021-09-16 18:20:45,713 - speechbrain.tokenizers.SentencePiece - INFO - Wrong recover words: 0
139
+ 2021-09-16 18:20:45,714 - speechbrain.tokenizers.SentencePiece - WARNING - accuracy recovering words: 1.0
140
+ 2021-09-16 18:20:45,714 - speechbrain.tokenizers.SentencePiece - INFO - ==== Accuracy checking for recovering text from tokenizer ===
141
+ 2021-09-16 18:20:58,798 - speechbrain.tokenizers.SentencePiece - INFO - recover words from: results/prepare_seg/test.json
142
+ 2021-09-16 18:20:58,798 - speechbrain.tokenizers.SentencePiece - INFO - Wrong recover words: 0
143
+ 2021-09-16 18:20:58,798 - speechbrain.tokenizers.SentencePiece - WARNING - accuracy recovering words: 1.0
144
+ 2021-09-16 18:57:44,327 - speechbrain.core - INFO - Beginning experiment!
145
+ 2021-09-16 18:57:44,327 - speechbrain.core - INFO - Experiment folder: results/tokenizer_seg_bpe5k_char
146
+ 2021-09-16 18:57:44,526 - speechbrain.utils.superpowers - DEBUG - appdirs==1.4.4
147
+ argon2-cffi==20.1.0
148
+ async-generator==1.10
149
+ attrs==21.2.0
150
+ autopep8==1.5.7
151
+ backcall==0.2.0
152
+ backports.entry-points-selectable==1.1.0
153
+ black==19.10b0
154
+ bleach==3.3.1
155
+ certifi==2021.5.30
156
+ cffi==1.14.6
157
+ cfgv==3.3.0
158
+ charset-normalizer==2.0.4
159
+ click==8.0.1
160
+ cycler==0.10.0
161
+ d2l==0.16.6
162
+ datasets==1.11.0
163
+ debugpy==1.3.0
164
+ decorator==5.0.9
165
+ defusedxml==0.7.1
166
+ dill==0.3.4
167
+ distlib==0.3.2
168
+ entrypoints==0.3
169
+ filelock==3.0.12
170
+ flake8==3.7.9
171
+ fsspec==2021.7.0
172
+ huggingface-hub==0.0.16
173
+ HyperPyYAML==1.0.0
174
+ identify==2.2.11
175
+ idna==3.2
176
+ ipykernel==6.0.2
177
+ ipython==7.25.0
178
+ ipython-genutils==0.2.0
179
+ ipywidgets==7.6.3
180
+ jedi==0.18.0
181
+ joblib==1.0.1
182
+ jupyter==1.0.0
183
+ jupyter-client==6.1.12
184
+ jupyter-console==6.4.0
185
+ jupyter-core==4.7.1
186
+ jupyterlab-pygments==0.1.2
187
+ jupyterlab-widgets==1.0.0
188
+ kiwisolver==1.3.1
189
+ matplotlib==3.4.2
190
+ matplotlib-inline==0.1.2
191
+ mccabe==0.6.1
192
+ mistune==0.8.4
193
+ mkl-fft==1.3.0
194
+ mkl-random @ file:///tmp/build/80754af9/mkl_random_1626186064646/work
195
+ mkl-service==2.4.0
196
+ more-itertools==8.9.0
197
+ multiprocess==0.70.12.2
198
+ nbclient==0.5.3
199
+ nbconvert==6.1.0
200
+ nbformat==5.1.3
201
+ nest-asyncio==1.5.1
202
+ nodeenv==1.6.0
203
+ notebook==6.4.0
204
+ numpy==1.21.2
205
+ olefile @ file:///Users/ktietz/demo/mc3/conda-bld/olefile_1629805411829/work
206
+ packaging==21.0
207
+ pandas==1.3.0
208
+ pandocfilters==1.4.3
209
+ parso==0.8.2
210
+ pathspec==0.9.0
211
+ pickleshare==0.7.5
212
+ Pillow==8.3.1
213
+ platformdirs==2.0.2
214
+ pluggy==0.13.1
215
+ pre-commit==2.15.0
216
+ prometheus-client==0.11.0
217
+ prompt-toolkit==3.0.19
218
+ ptyprocess==0.7.0
219
+ py==1.10.0
220
+ pyarrow==5.0.0
221
+ pycodestyle==2.5.0
222
+ pycparser==2.20
223
+ pydub @ file:///home/conda/feedstock_root/build_artifacts/pydub_1615612442567/work
224
+ pyflakes==2.1.1
225
+ Pygments==2.9.0
226
+ pyparsing==2.4.7
227
+ pytest==5.4.1
228
+ python-dateutil==2.8.2
229
+ pytube==10.9.3
230
+ PyYAML==5.4.1
231
+ pyzmq==22.1.0
232
+ qtconsole==5.1.1
233
+ QtPy==1.9.0
234
+ regex==2021.7.6
235
+ requests==2.26.0
236
+ ruamel.yaml==0.17.10
237
+ ruamel.yaml.clib==0.2.6
238
+ scikit-learn @ file:///tmp/build/80754af9/scikit-learn_1621370412049/work
239
+ scipy==1.7.1
240
+ Send2Trash==1.7.1
241
+ sentencepiece==0.1.96
242
+ six @ file:///tmp/build/80754af9/six_1623709665295/work
243
+ -e git+https://github.com/speechbrain/speechbrain.git@2ec4839746970875fc763aa354c44a3356685ef6#egg=speechbrain
244
+ terminado==0.10.1
245
+ testpath==0.5.0
246
+ threadpoolctl @ file:///Users/ktietz/demo/mc3/conda-bld/threadpoolctl_1629802263681/work
247
+ toml==0.10.2
248
+ torch==1.8.1
249
+ torchaudio==0.8.1
250
+ torchvision==0.10.0
251
+ tornado==6.1
252
+ tqdm==4.62.2
253
+ traitlets==5.0.5
254
+ typed-ast==1.4.3
255
+ typing-extensions==3.10.0.0
256
+ urllib3==1.26.6
257
+ virtualenv==20.6.0
258
+ wcwidth==0.2.5
259
+ webencodings==0.5.1
260
+ widgetsnbextension==3.5.1
261
+ xxhash==2.0.2
262
+ yamllint==1.23.0
263
+
264
+
265
+ 2021-09-16 18:57:44,530 - speechbrain.utils.superpowers - DEBUG - e3e51338
266
+
267
+
268
+ 2021-09-16 18:57:46,443 - speechbrain.tokenizers.SentencePiece - INFO - Tokenizer is already trained.
269
+ 2021-09-16 18:57:46,443 - speechbrain.tokenizers.SentencePiece - INFO - ==== Loading Tokenizer ===
270
+ 2021-09-16 18:57:46,443 - speechbrain.tokenizers.SentencePiece - INFO - Tokenizer path: results/tokenizer_seg_bpe5k_char/5000_char.model
271
+ 2021-09-16 18:57:46,443 - speechbrain.tokenizers.SentencePiece - INFO - Tokenizer vocab_size: 5000
272
+ 2021-09-16 18:57:46,444 - speechbrain.tokenizers.SentencePiece - INFO - Tokenizer type: char
273
+ 2021-09-16 18:57:46,447 - speechbrain.tokenizers.SentencePiece - INFO - ==== Accuracy checking for recovering text from tokenizer ===
274
+ 2021-09-16 18:57:59,747 - speechbrain.tokenizers.SentencePiece - INFO - recover words from: results/prepare_seg/dev.json
275
+ 2021-09-16 18:57:59,747 - speechbrain.tokenizers.SentencePiece - INFO - Wrong recover words: 0
276
+ 2021-09-16 18:57:59,747 - speechbrain.tokenizers.SentencePiece - WARNING - accuracy recovering words: 1.0
277
+ 2021-09-16 18:57:59,747 - speechbrain.tokenizers.SentencePiece - INFO - ==== Accuracy checking for recovering text from tokenizer ===
278
+ 2021-09-16 18:58:12,647 - speechbrain.tokenizers.SentencePiece - INFO - recover words from: results/prepare_seg/eval.json
279
+ 2021-09-16 18:58:12,647 - speechbrain.tokenizers.SentencePiece - INFO - Wrong recover words: 0
280
+ 2021-09-16 18:58:12,647 - speechbrain.tokenizers.SentencePiece - WARNING - accuracy recovering words: 1.0
281
+ 2021-09-16 18:58:12,647 - speechbrain.tokenizers.SentencePiece - INFO - ==== Accuracy checking for recovering text from tokenizer ===
282
+ 2021-09-16 18:58:25,444 - speechbrain.tokenizers.SentencePiece - INFO - recover words from: results/prepare_seg/test.json
283
+ 2021-09-16 18:58:25,444 - speechbrain.tokenizers.SentencePiece - INFO - Wrong recover words: 0
284
+ 2021-09-16 18:58:25,444 - speechbrain.tokenizers.SentencePiece - WARNING - accuracy recovering words: 1.0
285
+ 2021-09-18 01:09:55,556 - speechbrain.core - INFO - Beginning experiment!
286
+ 2021-09-18 01:09:55,576 - speechbrain.core - INFO - Experiment folder: results/tokenizer_seg_bpe5k_char
287
+ 2021-09-18 01:09:55,858 - speechbrain.utils.superpowers - DEBUG - appdirs==1.4.4
288
+ argon2-cffi==20.1.0
289
+ async-generator==1.10
290
+ attrs==21.2.0
291
+ autopep8==1.5.7
292
+ backcall==0.2.0
293
+ backports.entry-points-selectable==1.1.0
294
+ black==19.10b0
295
+ bleach==3.3.1
296
+ certifi==2021.5.30
297
+ cffi==1.14.6
298
+ cfgv==3.3.0
299
+ charset-normalizer==2.0.4
300
+ click==8.0.1
301
+ cycler==0.10.0
302
+ d2l==0.16.6
303
+ datasets==1.11.0
304
+ debugpy==1.3.0
305
+ decorator==5.0.9
306
+ defusedxml==0.7.1
307
+ dill==0.3.4
308
+ distlib==0.3.2
309
+ entrypoints==0.3
310
+ filelock==3.0.12
311
+ flake8==3.7.9
312
+ fsspec==2021.7.0
313
+ huggingface-hub==0.0.16
314
+ HyperPyYAML==1.0.0
315
+ identify==2.2.11
316
+ idna==3.2
317
+ ipykernel==6.0.2
318
+ ipython==7.25.0
319
+ ipython-genutils==0.2.0
320
+ ipywidgets==7.6.3
321
+ jedi==0.18.0
322
+ joblib==1.0.1
323
+ jupyter==1.0.0
324
+ jupyter-client==6.1.12
325
+ jupyter-console==6.4.0
326
+ jupyter-core==4.7.1
327
+ jupyterlab-pygments==0.1.2
328
+ jupyterlab-widgets==1.0.0
329
+ kiwisolver==1.3.1
330
+ matplotlib==3.4.2
331
+ matplotlib-inline==0.1.2
332
+ mccabe==0.6.1
333
+ mistune==0.8.4
334
+ mkl-fft==1.3.0
335
+ mkl-random @ file:///tmp/build/80754af9/mkl_random_1626186064646/work
336
+ mkl-service==2.4.0
337
+ more-itertools==8.9.0
338
+ multiprocess==0.70.12.2
339
+ nbclient==0.5.3
340
+ nbconvert==6.1.0
341
+ nbformat==5.1.3
342
+ nest-asyncio==1.5.1
343
+ nodeenv==1.6.0
344
+ notebook==6.4.0
345
+ numpy==1.21.2
346
+ olefile @ file:///Users/ktietz/demo/mc3/conda-bld/olefile_1629805411829/work
347
+ packaging==21.0
348
+ pandas==1.3.0
349
+ pandocfilters==1.4.3
350
+ parso==0.8.2
351
+ pathspec==0.9.0
352
+ pickleshare==0.7.5
353
+ Pillow==8.3.1
354
+ platformdirs==2.0.2
355
+ pluggy==0.13.1
356
+ pre-commit==2.15.0
357
+ prometheus-client==0.11.0
358
+ prompt-toolkit==3.0.19
359
+ ptyprocess==0.7.0
360
+ py==1.10.0
361
+ pyarrow==5.0.0
362
+ pycodestyle==2.5.0
363
+ pycparser==2.20
364
+ pydub @ file:///home/conda/feedstock_root/build_artifacts/pydub_1615612442567/work
365
+ pyflakes==2.1.1
366
+ Pygments==2.9.0
367
+ pyparsing==2.4.7
368
+ pytest==5.4.1
369
+ python-dateutil==2.8.2
370
+ pytube==10.9.3
371
+ PyYAML==5.4.1
372
+ pyzmq==22.1.0
373
+ qtconsole==5.1.1
374
+ QtPy==1.9.0
375
+ regex==2021.7.6
376
+ requests==2.26.0
377
+ ruamel.yaml==0.17.10
378
+ ruamel.yaml.clib==0.2.6
379
+ scikit-learn @ file:///tmp/build/80754af9/scikit-learn_1621370412049/work
380
+ scipy==1.7.1
381
+ Send2Trash==1.7.1
382
+ sentencepiece==0.1.96
383
+ six @ file:///tmp/build/80754af9/six_1623709665295/work
384
+ -e git+https://github.com/speechbrain/speechbrain.git@2ec4839746970875fc763aa354c44a3356685ef6#egg=speechbrain
385
+ terminado==0.10.1
386
+ testpath==0.5.0
387
+ threadpoolctl @ file:///Users/ktietz/demo/mc3/conda-bld/threadpoolctl_1629802263681/work
388
+ toml==0.10.2
389
+ torch==1.8.1
390
+ torchaudio==0.8.1
391
+ torchvision==0.10.0
392
+ tornado==6.1
393
+ tqdm==4.62.2
394
+ traitlets==5.0.5
395
+ typed-ast==1.4.3
396
+ typing-extensions==3.10.0.0
397
+ urllib3==1.26.6
398
+ virtualenv==20.6.0
399
+ wcwidth==0.2.5
400
+ webencodings==0.5.1
401
+ widgetsnbextension==3.5.1
402
+ xxhash==2.0.2
403
+ yamllint==1.23.0
404
+
405
+
406
+ 2021-09-18 01:09:55,862 - speechbrain.utils.superpowers - DEBUG - e3e51338
407
+
408
+
409
+ 2021-09-18 01:09:57,778 - speechbrain.tokenizers.SentencePiece - INFO - Tokenizer is already trained.
410
+ 2021-09-18 01:09:57,778 - speechbrain.tokenizers.SentencePiece - INFO - ==== Loading Tokenizer ===
411
+ 2021-09-18 01:09:57,778 - speechbrain.tokenizers.SentencePiece - INFO - Tokenizer path: results/tokenizer_seg_bpe5k_char/5000_char.model
412
+ 2021-09-18 01:09:57,778 - speechbrain.tokenizers.SentencePiece - INFO - Tokenizer vocab_size: 5000
413
+ 2021-09-18 01:09:57,778 - speechbrain.tokenizers.SentencePiece - INFO - Tokenizer type: char
414
+ 2021-09-18 01:09:57,780 - speechbrain.tokenizers.SentencePiece - INFO - ==== Accuracy checking for recovering text from tokenizer ===
415
+ 2021-09-18 01:10:15,083 - speechbrain.tokenizers.SentencePiece - INFO - recover words from: results/prepare_seg/dev.json
416
+ 2021-09-18 01:10:15,083 - speechbrain.tokenizers.SentencePiece - INFO - Wrong recover words: 0
417
+ 2021-09-18 01:10:15,083 - speechbrain.tokenizers.SentencePiece - WARNING - accuracy recovering words: 1.0
418
+ 2021-09-18 01:10:15,083 - speechbrain.tokenizers.SentencePiece - INFO - ==== Accuracy checking for recovering text from tokenizer ===
419
+ 2021-09-18 01:10:32,413 - speechbrain.tokenizers.SentencePiece - INFO - recover words from: results/prepare_seg/eval.json
420
+ 2021-09-18 01:10:32,413 - speechbrain.tokenizers.SentencePiece - INFO - Wrong recover words: 0
421
+ 2021-09-18 01:10:32,413 - speechbrain.tokenizers.SentencePiece - WARNING - accuracy recovering words: 1.0
422
+ 2021-09-18 01:10:32,413 - speechbrain.tokenizers.SentencePiece - INFO - ==== Accuracy checking for recovering text from tokenizer ===
423
+ 2021-09-18 01:10:49,667 - speechbrain.tokenizers.SentencePiece - INFO - recover words from: results/prepare_seg/test.json
424
+ 2021-09-18 01:10:49,667 - speechbrain.tokenizers.SentencePiece - INFO - Wrong recover words: 0
425
+ 2021-09-18 01:10:49,667 - speechbrain.tokenizers.SentencePiece - WARNING - accuracy recovering words: 1.0
426
+ 2021-10-04 03:04:09,349 - speechbrain.core - INFO - Beginning experiment!
427
+ 2021-10-04 03:04:09,349 - speechbrain.core - INFO - Experiment folder: results/tokenizer_seg_bpe5k_char
428
+ 2021-10-04 03:04:09,986 - speechbrain.utils.superpowers - DEBUG - appdirs==1.4.4
429
+ argon2-cffi==20.1.0
430
+ async-generator==1.10
431
+ attrs==19.3.0
432
+ Automat==0.8.0
433
+ autopep8==1.5.7
434
+ backcall==0.2.0
435
+ backports.entry-points-selectable==1.1.0
436
+ black==19.10b0
437
+ bleach==3.3.1
438
+ blessings==1.7
439
+ blinker==1.4
440
+ bottle==0.12.19
441
+ certifi==2019.11.28
442
+ cffi==1.14.6
443
+ cfgv==3.3.0
444
+ chardet==3.0.4
445
+ Click==7.0
446
+ cloud-init==21.2
447
+ colorama==0.4.3
448
+ command-not-found==0.3
449
+ configobj==5.0.6
450
+ constantly==15.1.0
451
+ cryptography==2.8
452
+ cupshelpers==1.0
453
+ cycler==0.10.0
454
+ d2l==0.16.6
455
+ datasets==1.11.0
456
+ dbus-python==1.2.16
457
+ debugpy==1.3.0
458
+ decorator==5.0.9
459
+ defer==1.0.6
460
+ defusedxml==0.7.1
461
+ dill==0.3.4
462
+ distlib==0.3.2
463
+ distro==1.4.0
464
+ distro-info===0.23ubuntu1
465
+ entrypoints==0.3
466
+ filelock==3.0.12
467
+ flake8==3.7.9
468
+ fsspec==2021.7.0
469
+ gpustat==0.6.0
470
+ gpuview==0.4.0
471
+ httplib2==0.14.0
472
+ huggingface-hub==0.0.16
473
+ hyperlink==19.0.0
474
+ HyperPyYAML==1.0.0
475
+ identify==2.2.11
476
+ idna==2.8
477
+ importlib-metadata==1.5.0
478
+ incremental==16.10.1
479
+ ipykernel==6.0.2
480
+ ipython==7.25.0
481
+ ipython-genutils==0.2.0
482
+ ipywidgets==7.6.3
483
+ jedi==0.18.0
484
+ Jinja2==2.10.1
485
+ joblib==1.0.1
486
+ jsonpatch==1.22
487
+ jsonpointer==2.0
488
+ jsonschema==3.2.0
489
+ jupyter==1.0.0
490
+ jupyter-client==6.1.12
491
+ jupyter-console==6.4.0
492
+ jupyter-core==4.7.1
493
+ jupyterlab-pygments==0.1.2
494
+ jupyterlab-widgets==1.0.0
495
+ keyring==18.0.1
496
+ kiwisolver==1.3.1
497
+ language-selector==0.1
498
+ launchpadlib==1.10.13
499
+ lazr.restfulclient==0.14.2
500
+ lazr.uri==1.0.3
501
+ macaroonbakery==1.3.1
502
+ MarkupSafe==1.1.0
503
+ matplotlib==3.4.2
504
+ matplotlib-inline==0.1.2
505
+ mccabe==0.6.1
506
+ mistune==0.8.4
507
+ more-itertools==4.2.0
508
+ multiprocess==0.70.12.2
509
+ nbclient==0.5.3
510
+ nbconvert==6.1.0
511
+ nbformat==5.1.3
512
+ nest-asyncio==1.5.1
513
+ netifaces==0.10.4
514
+ nodeenv==1.6.0
515
+ notebook==6.4.0
516
+ numpy==1.21.2
517
+ nvidia-ml-py3==7.352.0
518
+ oauthlib==3.1.0
519
+ packaging==21.0
520
+ pandas==1.3.0
521
+ pandocfilters==1.4.3
522
+ parso==0.8.2
523
+ pathspec==0.9.0
524
+ pexpect==4.6.0
525
+ pickleshare==0.7.5
526
+ Pillow==8.3.1
527
+ platformdirs==2.0.2
528
+ pluggy==0.13.1
529
+ pre-commit==2.15.0
530
+ prometheus-client==0.11.0
531
+ prompt-toolkit==3.0.19
532
+ protobuf==3.6.1
533
+ psutil==5.8.0
534
+ ptyprocess==0.7.0
535
+ py==1.10.0
536
+ pyarrow==5.0.0
537
+ pyasn1==0.4.2
538
+ pyasn1-modules==0.2.1
539
+ pycairo==1.16.2
540
+ pycodestyle==2.5.0
541
+ pycparser==2.20
542
+ pycups==1.9.73
543
+ pyflakes==2.1.1
544
+ Pygments==2.9.0
545
+ PyGObject==3.36.0
546
+ PyHamcrest==1.9.0
547
+ PyJWT==1.7.1
548
+ pymacaroons==0.13.0
549
+ PyMySQL==1.0.2
550
+ PyNaCl==1.3.0
551
+ pyOpenSSL==19.0.0
552
+ pyparsing==2.4.7
553
+ pyRFC3339==1.1
554
+ pyrsistent==0.15.5
555
+ pyserial==3.4
556
+ pytest==5.4.1
557
+ python-apt==2.0.0+ubuntu0.20.4.5
558
+ python-dateutil==2.8.2
559
+ python-debian===0.1.36ubuntu1
560
+ pytube==10.9.3
561
+ pytz==2019.3
562
+ PyYAML==5.3.1
563
+ pyzmq==22.1.0
564
+ qtconsole==5.1.1
565
+ QtPy==1.9.0
566
+ regex==2021.7.6
567
+ requests==2.22.0
568
+ requests-unixsocket==0.2.0
569
+ ruamel.yaml==0.17.10
570
+ ruamel.yaml.clib==0.2.6
571
+ scipy==1.7.1
572
+ screen-resolution-extra==0.0.0
573
+ SecretStorage==2.3.1
574
+ Send2Trash==1.7.1
575
+ sentencepiece==0.1.96
576
+ service-identity==18.1.0
577
+ simplejson==3.16.0
578
+ six==1.14.0
579
+ sos==4.1
580
+ -e git+https://github.com/speechbrain/speechbrain.git@1d194bfc51ae20b9e38596d220cdf0f4977e69de#egg=speechbrain
581
+ ssh-import-id==5.10
582
+ supervisor==4.1.0
583
+ systemd-python==234
584
+ terminado==0.10.1
585
+ testpath==0.5.0
586
+ toml==0.10.2
587
+ torch==1.8.1
588
+ torchaudio==0.8.1
589
+ torchvision==0.10.0
590
+ tornado==6.1
591
+ tqdm==4.62.2
592
+ traitlets==5.0.5
593
+ Twisted==18.9.0
594
+ typed-ast==1.4.3
595
+ typing-extensions==3.10.0.0
596
+ ubuntu-advantage-tools==27.2
597
+ ufw==0.36
598
+ unattended-upgrades==0.1
599
+ urllib3==1.25.8
600
+ virtualenv==20.6.0
601
+ wadllib==1.3.3
602
+ wcwidth==0.2.5
603
+ webencodings==0.5.1
604
+ widgetsnbextension==3.5.1
605
+ xkit==0.0.0
606
+ xxhash==2.0.2
607
+ yamllint==1.23.0
608
+ zipp==1.0.0
609
+ zope.interface==4.7.1
610
+
611
+
612
+ 2021-10-04 03:04:09,989 - speechbrain.utils.superpowers - DEBUG - e3e51338
613
+
614
+
615
+ 2021-10-04 03:04:10,195 - speechbrain.core - ERROR - Exception:
616
+ Traceback (most recent call last):
617
+ File "Tokenizer/train.py", line 21, in <module>
618
+ run_on_main(
619
+ File "/home/wayne/speechbrain/speechbrain/utils/distributed.py", line 61, in run_on_main
620
+ func(*args, **kwargs)
621
+ File "/mnt/md0/user_wayne/speechbrain/recipes/MATBN/Tokenizer/matbn_prepare.py", line 67, in prepare_matbn
622
+ segments_info = extract_segments_info(segments_path)
623
+ File "/mnt/md0/user_wayne/speechbrain/recipes/MATBN/Tokenizer/matbn_prepare.py", line 117, in extract_segments_info
624
+ with open(segments_path, "r", encoding="utf-8") as segments_file:
625
+ FileNotFoundError: [Errno 2] No such file or directory: '/home/wayne/CORPUS/MATBN_SEG/data/eval/segments'
626
+ 2021-10-04 03:04:50,119 - speechbrain.core - INFO - Beginning experiment!
627
+ 2021-10-04 03:04:50,119 - speechbrain.core - INFO - Experiment folder: results/tokenizer_seg_bpe5k_char
628
+ 2021-10-04 03:04:50,727 - speechbrain.utils.superpowers - DEBUG - appdirs==1.4.4
629
+ argon2-cffi==20.1.0
630
+ async-generator==1.10
631
+ attrs==19.3.0
632
+ Automat==0.8.0
633
+ autopep8==1.5.7
634
+ backcall==0.2.0
635
+ backports.entry-points-selectable==1.1.0
636
+ black==19.10b0
637
+ bleach==3.3.1
638
+ blessings==1.7
639
+ blinker==1.4
640
+ bottle==0.12.19
641
+ certifi==2019.11.28
642
+ cffi==1.14.6
643
+ cfgv==3.3.0
644
+ chardet==3.0.4
645
+ Click==7.0
646
+ cloud-init==21.2
647
+ colorama==0.4.3
648
+ command-not-found==0.3
649
+ configobj==5.0.6
650
+ constantly==15.1.0
651
+ cryptography==2.8
652
+ cupshelpers==1.0
653
+ cycler==0.10.0
654
+ d2l==0.16.6
655
+ datasets==1.11.0
656
+ dbus-python==1.2.16
657
+ debugpy==1.3.0
658
+ decorator==5.0.9
659
+ defer==1.0.6
660
+ defusedxml==0.7.1
661
+ dill==0.3.4
662
+ distlib==0.3.2
663
+ distro==1.4.0
664
+ distro-info===0.23ubuntu1
665
+ entrypoints==0.3
666
+ filelock==3.0.12
667
+ flake8==3.7.9
668
+ fsspec==2021.7.0
669
+ gpustat==0.6.0
670
+ gpuview==0.4.0
671
+ httplib2==0.14.0
672
+ huggingface-hub==0.0.16
673
+ hyperlink==19.0.0
674
+ HyperPyYAML==1.0.0
675
+ identify==2.2.11
676
+ idna==2.8
677
+ importlib-metadata==1.5.0
678
+ incremental==16.10.1
679
+ ipykernel==6.0.2
680
+ ipython==7.25.0
681
+ ipython-genutils==0.2.0
682
+ ipywidgets==7.6.3
683
+ jedi==0.18.0
684
+ Jinja2==2.10.1
685
+ joblib==1.0.1
686
+ jsonpatch==1.22
687
+ jsonpointer==2.0
688
+ jsonschema==3.2.0
689
+ jupyter==1.0.0
690
+ jupyter-client==6.1.12
691
+ jupyter-console==6.4.0
692
+ jupyter-core==4.7.1
693
+ jupyterlab-pygments==0.1.2
694
+ jupyterlab-widgets==1.0.0
695
+ keyring==18.0.1
696
+ kiwisolver==1.3.1
697
+ language-selector==0.1
698
+ launchpadlib==1.10.13
699
+ lazr.restfulclient==0.14.2
700
+ lazr.uri==1.0.3
701
+ macaroonbakery==1.3.1
702
+ MarkupSafe==1.1.0
703
+ matplotlib==3.4.2
704
+ matplotlib-inline==0.1.2
705
+ mccabe==0.6.1
706
+ mistune==0.8.4
707
+ more-itertools==4.2.0
708
+ multiprocess==0.70.12.2
709
+ nbclient==0.5.3
710
+ nbconvert==6.1.0
711
+ nbformat==5.1.3
712
+ nest-asyncio==1.5.1
713
+ netifaces==0.10.4
714
+ nodeenv==1.6.0
715
+ notebook==6.4.0
716
+ numpy==1.21.2
717
+ nvidia-ml-py3==7.352.0
718
+ oauthlib==3.1.0
719
+ packaging==21.0
720
+ pandas==1.3.0
721
+ pandocfilters==1.4.3
722
+ parso==0.8.2
723
+ pathspec==0.9.0
724
+ pexpect==4.6.0
725
+ pickleshare==0.7.5
726
+ Pillow==8.3.1
727
+ platformdirs==2.0.2
728
+ pluggy==0.13.1
729
+ pre-commit==2.15.0
730
+ prometheus-client==0.11.0
731
+ prompt-toolkit==3.0.19
732
+ protobuf==3.6.1
733
+ psutil==5.8.0
734
+ ptyprocess==0.7.0
735
+ py==1.10.0
736
+ pyarrow==5.0.0
737
+ pyasn1==0.4.2
738
+ pyasn1-modules==0.2.1
739
+ pycairo==1.16.2
740
+ pycodestyle==2.5.0
741
+ pycparser==2.20
742
+ pycups==1.9.73
743
+ pyflakes==2.1.1
744
+ Pygments==2.9.0
745
+ PyGObject==3.36.0
746
+ PyHamcrest==1.9.0
747
+ PyJWT==1.7.1
748
+ pymacaroons==0.13.0
749
+ PyMySQL==1.0.2
750
+ PyNaCl==1.3.0
751
+ pyOpenSSL==19.0.0
752
+ pyparsing==2.4.7
753
+ pyRFC3339==1.1
754
+ pyrsistent==0.15.5
755
+ pyserial==3.4
756
+ pytest==5.4.1
757
+ python-apt==2.0.0+ubuntu0.20.4.5
758
+ python-dateutil==2.8.2
759
+ python-debian===0.1.36ubuntu1
760
+ pytube==10.9.3
761
+ pytz==2019.3
762
+ PyYAML==5.3.1
763
+ pyzmq==22.1.0
764
+ qtconsole==5.1.1
765
+ QtPy==1.9.0
766
+ regex==2021.7.6
767
+ requests==2.22.0
768
+ requests-unixsocket==0.2.0
769
+ ruamel.yaml==0.17.10
770
+ ruamel.yaml.clib==0.2.6
771
+ scipy==1.7.1
772
+ screen-resolution-extra==0.0.0
773
+ SecretStorage==2.3.1
774
+ Send2Trash==1.7.1
775
+ sentencepiece==0.1.96
776
+ service-identity==18.1.0
777
+ simplejson==3.16.0
778
+ six==1.14.0
779
+ sos==4.1
780
+ -e git+https://github.com/speechbrain/speechbrain.git@1d194bfc51ae20b9e38596d220cdf0f4977e69de#egg=speechbrain
781
+ ssh-import-id==5.10
782
+ supervisor==4.1.0
783
+ systemd-python==234
784
+ terminado==0.10.1
785
+ testpath==0.5.0
786
+ toml==0.10.2
787
+ torch==1.8.1
788
+ torchaudio==0.8.1
789
+ torchvision==0.10.0
790
+ tornado==6.1
791
+ tqdm==4.62.2
792
+ traitlets==5.0.5
793
+ Twisted==18.9.0
794
+ typed-ast==1.4.3
795
+ typing-extensions==3.10.0.0
796
+ ubuntu-advantage-tools==27.2
797
+ ufw==0.36
798
+ unattended-upgrades==0.1
799
+ urllib3==1.25.8
800
+ virtualenv==20.6.0
801
+ wadllib==1.3.3
802
+ wcwidth==0.2.5
803
+ webencodings==0.5.1
804
+ widgetsnbextension==3.5.1
805
+ xkit==0.0.0
806
+ xxhash==2.0.2
807
+ yamllint==1.23.0
808
+ zipp==1.0.0
809
+ zope.interface==4.7.1
810
+
811
+
812
+ 2021-10-04 03:04:50,730 - speechbrain.utils.superpowers - DEBUG - e3e51338
813
+
814
+
815
+ 2021-10-04 03:04:52,523 - speechbrain.tokenizers.SentencePiece - INFO - Tokenizer is already trained.
816
+ 2021-10-04 03:04:52,523 - speechbrain.tokenizers.SentencePiece - INFO - ==== Loading Tokenizer ===
817
+ 2021-10-04 03:04:52,523 - speechbrain.tokenizers.SentencePiece - INFO - Tokenizer path: results/tokenizer_seg_bpe5k_char/5000_char.model
818
+ 2021-10-04 03:04:52,523 - speechbrain.tokenizers.SentencePiece - INFO - Tokenizer vocab_size: 5000
819
+ 2021-10-04 03:04:52,523 - speechbrain.tokenizers.SentencePiece - INFO - Tokenizer type: char
820
+ 2021-10-04 03:04:52,558 - speechbrain.tokenizers.SentencePiece - INFO - ==== Accuracy checking for recovering text from tokenizer ===
821
+ 2021-10-04 03:05:04,514 - speechbrain.tokenizers.SentencePiece - INFO - recover words from: results/prepare_seg/dev.json
822
+ 2021-10-04 03:05:04,514 - speechbrain.tokenizers.SentencePiece - INFO - Wrong recover words: 0
823
+ 2021-10-04 03:05:04,514 - speechbrain.tokenizers.SentencePiece - WARNING - accuracy recovering words: 1.0
824
+ 2021-10-04 03:05:04,514 - speechbrain.tokenizers.SentencePiece - INFO - ==== Accuracy checking for recovering text from tokenizer ===
825
+ 2021-10-04 03:05:16,440 - speechbrain.tokenizers.SentencePiece - INFO - recover words from: results/prepare_seg/eval.json
826
+ 2021-10-04 03:05:16,440 - speechbrain.tokenizers.SentencePiece - INFO - Wrong recover words: 0
827
+ 2021-10-04 03:05:16,440 - speechbrain.tokenizers.SentencePiece - WARNING - accuracy recovering words: 1.0
828
+ 2021-10-04 03:05:16,440 - speechbrain.tokenizers.SentencePiece - INFO - ==== Accuracy checking for recovering text from tokenizer ===
829
+ 2021-10-04 03:05:28,162 - speechbrain.tokenizers.SentencePiece - INFO - recover words from: results/prepare_seg/test.json
830
+ 2021-10-04 03:05:28,162 - speechbrain.tokenizers.SentencePiece - INFO - Wrong recover words: 0
831
+ 2021-10-04 03:05:28,162 - speechbrain.tokenizers.SentencePiece - WARNING - accuracy recovering words: 1.0
832
+ 2021-10-04 19:38:40,556 - speechbrain.core - INFO - Beginning experiment!
833
+ 2021-10-04 19:38:40,556 - speechbrain.core - INFO - Experiment folder: results/tokenizer_seg_bpe5k_char
834
+ 2021-10-04 19:38:41,182 - speechbrain.utils.superpowers - DEBUG - appdirs==1.4.4
835
+ argon2-cffi==20.1.0
836
+ async-generator==1.10
837
+ attrs==19.3.0
838
+ Automat==0.8.0
839
+ autopep8==1.5.7
840
+ backcall==0.2.0
841
+ backports.entry-points-selectable==1.1.0
842
+ black==19.10b0
843
+ bleach==3.3.1
844
+ blessings==1.7
845
+ blinker==1.4
846
+ bottle==0.12.19
847
+ certifi==2019.11.28
848
+ cffi==1.14.6
849
+ cfgv==3.3.0
850
+ chardet==3.0.4
851
+ Click==7.0
852
+ cloud-init==21.2
853
+ colorama==0.4.3
854
+ command-not-found==0.3
855
+ configobj==5.0.6
856
+ constantly==15.1.0
857
+ cryptography==2.8
858
+ cupshelpers==1.0
859
+ cycler==0.10.0
860
+ d2l==0.16.6
861
+ datasets==1.11.0
862
+ dbus-python==1.2.16
863
+ debugpy==1.3.0
864
+ decorator==5.0.9
865
+ defer==1.0.6
866
+ defusedxml==0.7.1
867
+ dill==0.3.4
868
+ distlib==0.3.2
869
+ distro==1.4.0
870
+ distro-info===0.23ubuntu1
871
+ entrypoints==0.3
872
+ filelock==3.0.12
873
+ flake8==3.7.9
874
+ fsspec==2021.7.0
875
+ gpustat==0.6.0
876
+ gpuview==0.4.0
877
+ httplib2==0.14.0
878
+ huggingface-hub==0.0.16
879
+ hyperlink==19.0.0
880
+ HyperPyYAML==1.0.0
881
+ identify==2.2.11
882
+ idna==2.8
883
+ importlib-metadata==1.5.0
884
+ incremental==16.10.1
885
+ ipykernel==6.0.2
886
+ ipython==7.25.0
887
+ ipython-genutils==0.2.0
888
+ ipywidgets==7.6.3
889
+ jedi==0.18.0
890
+ Jinja2==2.10.1
891
+ joblib==1.0.1
892
+ jsonpatch==1.22
893
+ jsonpointer==2.0
894
+ jsonschema==3.2.0
895
+ jupyter==1.0.0
896
+ jupyter-client==6.1.12
897
+ jupyter-console==6.4.0
898
+ jupyter-core==4.7.1
899
+ jupyterlab-pygments==0.1.2
900
+ jupyterlab-widgets==1.0.0
901
+ keyring==18.0.1
902
+ kiwisolver==1.3.1
903
+ language-selector==0.1
904
+ launchpadlib==1.10.13
905
+ lazr.restfulclient==0.14.2
906
+ lazr.uri==1.0.3
907
+ macaroonbakery==1.3.1
908
+ MarkupSafe==1.1.0
909
+ matplotlib==3.4.2
910
+ matplotlib-inline==0.1.2
911
+ mccabe==0.6.1
912
+ mistune==0.8.4
913
+ more-itertools==4.2.0
914
+ multiprocess==0.70.12.2
915
+ nbclient==0.5.3
916
+ nbconvert==6.1.0
917
+ nbformat==5.1.3
918
+ nest-asyncio==1.5.1
919
+ netifaces==0.10.4
920
+ nodeenv==1.6.0
921
+ notebook==6.4.0
922
+ numpy==1.21.2
923
+ nvidia-ml-py3==7.352.0
924
+ oauthlib==3.1.0
925
+ packaging==21.0
926
+ pandas==1.3.0
927
+ pandocfilters==1.4.3
928
+ parso==0.8.2
929
+ pathspec==0.9.0
930
+ pexpect==4.6.0
931
+ pickleshare==0.7.5
932
+ Pillow==8.3.1
933
+ platformdirs==2.0.2
934
+ pluggy==0.13.1
935
+ pre-commit==2.15.0
936
+ prometheus-client==0.11.0
937
+ prompt-toolkit==3.0.19
938
+ protobuf==3.6.1
939
+ psutil==5.8.0
940
+ ptyprocess==0.7.0
941
+ py==1.10.0
942
+ pyarrow==5.0.0
943
+ pyasn1==0.4.2
944
+ pyasn1-modules==0.2.1
945
+ pycairo==1.16.2
946
+ pycodestyle==2.5.0
947
+ pycparser==2.20
948
+ pycups==1.9.73
949
+ pyflakes==2.1.1
950
+ Pygments==2.9.0
951
+ PyGObject==3.36.0
952
+ PyHamcrest==1.9.0
953
+ PyJWT==1.7.1
954
+ pymacaroons==0.13.0
955
+ PyMySQL==1.0.2
956
+ PyNaCl==1.3.0
957
+ pyOpenSSL==19.0.0
958
+ pyparsing==2.4.7
959
+ pyRFC3339==1.1
960
+ pyrsistent==0.15.5
961
+ pyserial==3.4
962
+ pytest==5.4.1
963
+ python-apt==2.0.0+ubuntu0.20.4.5
964
+ python-dateutil==2.8.2
965
+ python-debian===0.1.36ubuntu1
966
+ pytube==10.9.3
967
+ pytz==2019.3
968
+ PyYAML==5.3.1
969
+ pyzmq==22.1.0
970
+ qtconsole==5.1.1
971
+ QtPy==1.9.0
972
+ regex==2021.7.6
973
+ requests==2.22.0
974
+ requests-unixsocket==0.2.0
975
+ ruamel.yaml==0.17.10
976
+ ruamel.yaml.clib==0.2.6
977
+ scipy==1.7.1
978
+ screen-resolution-extra==0.0.0
979
+ SecretStorage==2.3.1
980
+ Send2Trash==1.7.1
981
+ sentencepiece==0.1.96
982
+ service-identity==18.1.0
983
+ simplejson==3.16.0
984
+ six==1.14.0
985
+ sos==4.1
986
+ -e git+https://github.com/speechbrain/speechbrain.git@1d194bfc51ae20b9e38596d220cdf0f4977e69de#egg=speechbrain
987
+ ssh-import-id==5.10
988
+ supervisor==4.1.0
989
+ systemd-python==234
990
+ terminado==0.10.1
991
+ testpath==0.5.0
992
+ toml==0.10.2
993
+ torch==1.8.1
994
+ torchaudio==0.8.1
995
+ torchvision==0.10.0
996
+ tornado==6.1
997
+ tqdm==4.62.2
998
+ traitlets==5.0.5
999
+ Twisted==18.9.0
1000
+ typed-ast==1.4.3
1001
+ typing-extensions==3.10.0.0
1002
+ ubuntu-advantage-tools==27.2
1003
+ ufw==0.36
1004
+ unattended-upgrades==0.1
1005
+ urllib3==1.25.8
1006
+ virtualenv==20.6.0
1007
+ wadllib==1.3.3
1008
+ wcwidth==0.2.5
1009
+ webencodings==0.5.1
1010
+ widgetsnbextension==3.5.1
1011
+ xkit==0.0.0
1012
+ xxhash==2.0.2
1013
+ yamllint==1.23.0
1014
+ zipp==1.0.0
1015
+ zope.interface==4.7.1
1016
+
1017
+
1018
+ 2021-10-04 19:38:41,186 - speechbrain.utils.superpowers - DEBUG - e3e51338
1019
+
1020
+
1021
+ 2021-10-04 19:38:43,292 - speechbrain.tokenizers.SentencePiece - INFO - Tokenizer is already trained.
1022
+ 2021-10-04 19:38:43,292 - speechbrain.tokenizers.SentencePiece - INFO - ==== Loading Tokenizer ===
1023
+ 2021-10-04 19:38:43,292 - speechbrain.tokenizers.SentencePiece - INFO - Tokenizer path: results/tokenizer_seg_bpe5k_char/5000_char.model
1024
+ 2021-10-04 19:38:43,292 - speechbrain.tokenizers.SentencePiece - INFO - Tokenizer vocab_size: 5000
1025
+ 2021-10-04 19:38:43,292 - speechbrain.tokenizers.SentencePiece - INFO - Tokenizer type: char
1026
+ 2021-10-04 19:38:43,294 - speechbrain.tokenizers.SentencePiece - INFO - ==== Accuracy checking for recovering text from tokenizer ===
1027
+ 2021-10-04 19:38:54,904 - speechbrain.tokenizers.SentencePiece - INFO - recover words from: results/prepare_seg/dev.json
1028
+ 2021-10-04 19:38:54,904 - speechbrain.tokenizers.SentencePiece - INFO - Wrong recover words: 0
1029
+ 2021-10-04 19:38:54,904 - speechbrain.tokenizers.SentencePiece - WARNING - accuracy recovering words: 1.0
1030
+ 2021-10-04 19:38:54,904 - speechbrain.tokenizers.SentencePiece - INFO - ==== Accuracy checking for recovering text from tokenizer ===
1031
+ 2021-10-04 19:39:06,509 - speechbrain.tokenizers.SentencePiece - INFO - recover words from: results/prepare_seg/eval.json
1032
+ 2021-10-04 19:39:06,510 - speechbrain.tokenizers.SentencePiece - INFO - Wrong recover words: 0
1033
+ 2021-10-04 19:39:06,510 - speechbrain.tokenizers.SentencePiece - WARNING - accuracy recovering words: 1.0
1034
+ 2021-10-04 19:39:06,510 - speechbrain.tokenizers.SentencePiece - INFO - ==== Accuracy checking for recovering text from tokenizer ===
1035
+ 2021-10-04 19:39:18,349 - speechbrain.tokenizers.SentencePiece - INFO - recover words from: results/prepare_seg/test.json
1036
+ 2021-10-04 19:39:18,349 - speechbrain.tokenizers.SentencePiece - INFO - Wrong recover words: 0
1037
+ 2021-10-04 19:39:18,349 - speechbrain.tokenizers.SentencePiece - WARNING - accuracy recovering words: 1.0
ASR-model/tokenizer_seg_bpe5k_char/train.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""Entry point for training the MATBN tokenizer with SpeechBrain.

Reads a HyperPyYAML hyperparameter file from the command line, creates the
experiment directory, runs dataset preparation, then builds the tokenizer.
"""

import sys

import speechbrain as sb
from hyperpyyaml import load_hyperpyyaml
from speechbrain.utils.distributed import run_on_main


def _main() -> None:
    """Drive the full tokenizer-training pipeline from CLI arguments."""
    # SpeechBrain splits argv into the hparams file path, runtime options,
    # and YAML override strings.
    hparams_file_path, run_opts, overrides = sb.parse_arguments(sys.argv[1:])

    with open(hparams_file_path) as hparams_file:
        hparams = load_hyperpyyaml(hparams_file, overrides)

    # Persist the resolved hyperparameters next to the experiment outputs.
    sb.create_experiment_directory(
        experiment_directory=hparams["output_folder"],
        hyperparams_to_save=hparams_file_path,
        overrides=overrides,
    )

    # NOTE(review): imported after directory creation in the original;
    # presumably matbn_prepare sits alongside this script — confirm.
    from matbn_prepare import prepare_matbn

    # Run dataset preparation on the main process only.
    run_on_main(
        prepare_matbn,
        kwargs={
            "dataset_folder": hparams["dataset_folder"],
            "save_folder": hparams["prepare_folder"],
            "keep_unk": hparams["keep_unk"],
        },
    )

    # Calling the YAML-declared tokenizer object performs the training;
    # see the accompanying hyperparams.yaml for its configuration.
    hparams["tokenizer"]()


if __name__ == "__main__":
    _main()